849 files changed, 58243 insertions, 0 deletions
diff --git a/.ansible-lint b/.ansible-lint new file mode 100644 index 0000000..678a0d4 --- /dev/null +++ b/.ansible-lint @@ -0,0 +1,125 @@ +--- +# .ansible-lint + +profile: null # min, basic, moderate,safety, shared, production + +# Allows dumping of results in SARIF format +# sarif_file: result.sarif + +# exclude_paths included in this file are parsed relative to this file's location +# and not relative to the CWD of execution. CLI arguments passed to the --exclude +# option are parsed relative to the CWD of execution. +exclude_paths: + - .cache/ # implicit unless exclude_paths is defined in config + - .github/ + - test/fixtures/formatting-before/ + - test/fixtures/formatting-prettier/ +# parseable: true +# quiet: true +# strict: true +# verbosity: 1 + +# Mock modules or roles in order to pass ansible-playbook --syntax-check +mock_modules: + - zuul_return + # note the foo.bar is invalid as being neither a module or a collection + - fake_namespace.fake_collection.fake_module + - fake_namespace.fake_collection.fake_module.fake_submodule +mock_roles: + - mocked_role + - author.role_name # old standalone galaxy role + - fake_namespace.fake_collection.fake_role # role within a collection + +# Enable checking of loop variable prefixes in roles +loop_var_prefix: "^(__|{role}_)" + +# Enforce variable names to follow pattern below, in addition to Ansible own +# requirements, like avoiding python identifiers. To disable add `var-naming` +# to skip_list. +var_naming_pattern: "^[a-z_][a-z0-9_]*$" + +use_default_rules: true +# Load custom rules from this specific folder +# rulesdir: +# - ./rule/directory/ + +# Ansible-lint is able to recognize and load skip rules stored inside +# `.ansible-lint-ignore` files. To skip a rule just enter filename and tag, +# like "playbook.yml package-latest" on a new line. Optionally you can add +# comments after the tag, prefixed by "#". We discourage the use of skip_list +# below because that will hide violations from the output. When putting ignores +# inside the ignore file, they are marked as ignored, but still visible, making +# it easier to address later. +skip_list: + - skip_this_tag + +# Ansible-lint does not automatically load rules that have the 'opt-in' tag. +# You must enable opt-in rules by listing each rule 'id' below. +enable_list: + - args + - empty-string-compare # opt-in + - no-log-password # opt-in + - no-same-owner # opt-in + - name[prefix] # opt-in + # add yaml here if you want to avoid ignoring yaml checks when yamllint + # library is missing. Normally its absence just skips using that rule. + - yaml +# Report only a subset of tags and fully ignore any others +# tags: +# - jinja[spacing] + +# Ansible-lint does not fail on warnings from the rules or tags listed below +warn_list: + - skip_this_tag + - experimental # experimental is included in the implicit list + # - role-name + # - yaml[document-start] # you can also use sub-rule matches + +# Some rules can transform files to fix (or make it easier to fix) identified +# errors. `ansible-lint --write` will reformat YAML files and run these transforms. +# By default it will run all transforms (effectively `write_list: ["all"]`). +# You can disable running transforms by setting `write_list: ["none"]`. +# Or only enable a subset of rule transforms by listing rules/tags here. +# write_list: +# - all + +# Offline mode disables installation of requirements.yml and schema refreshing +offline: true + +# Return success if number of violations compared with previous git +# commit has not increased. 
This feature works only in git +# repositories. +progressive: false + +# Define required Ansible's variables to satisfy syntax check +extra_vars: + foo: bar + multiline_string_variable: | + line1 + line2 + complex_variable: ":{;\t$()" + +# Uncomment to enforce action validation with tasks, usually is not +# needed as Ansible syntax check also covers it. +# skip_action_validation: false + +# List of additional kind:pattern to be added at the top of the default +# match list, first match determines the file kind. +kinds: + # - playbook: "**/examples/*.{yml,yaml}" + # - galaxy: "**/folder/galaxy.yml" + # - tasks: "**/tasks/*.yml" + # - vars: "**/vars/*.yml" + # - meta: "**/meta/main.yml" + - yaml: "**/*.yaml-too" + +# List of additional collections to allow in only-builtins rule. +# only_builtins_allow_collections: +# - example_ns.example_collection + +# List of additions modules to allow in only-builtins rule. +# only_builtins_allow_modules: +# - example_module + +# Allow setting custom prefix for name[prefix] rule +task_name_prefix: "{stem} | " diff --git a/.ansible-lint-ignore b/.ansible-lint-ignore new file mode 100644 index 0000000..dae2afe --- /dev/null +++ b/.ansible-lint-ignore @@ -0,0 +1,3 @@ +# See https://ansible-lint.readthedocs.io/configuring/#ignoring-rules-for-entire-files +playbook2.yml package-latest # comment +playbook2.yml foo-bar diff --git a/.config/ansible-lint.spec b/.config/ansible-lint.spec new file mode 100644 index 0000000..22a46f9 --- /dev/null +++ b/.config/ansible-lint.spec @@ -0,0 +1,73 @@ +# spell-checker:ignore bcond pkgversion buildrequires autosetup PYTHONPATH noarch buildroot bindir sitelib numprocesses clib +# All tests require Internet access +# to test in mock use: --enable-network --with check +# to test in a privileged environment use: +# --with check --with privileged_tests +%bcond_with check +%bcond_with privileged_tests + +Name: ansible-lint +Version: VERSION_PLACEHOLDER +Release: 1%{?dist} +Summary: Ansible-lint checks ansible content for common mistakes + +License: GPL-3.0-or-later AND MIT +URL: https://github.com/ansible/ansible-lint +Source0: %{pypi_source} + +BuildArch: noarch + +BuildRequires: python%{python3_pkgversion}-devel +%if %{with check} +# These are required for tests: +BuildRequires: python%{python3_pkgversion}-pytest +BuildRequires: python%{python3_pkgversion}-pytest-xdist +BuildRequires: python%{python3_pkgversion}-libselinux +BuildRequires: git-core +%endif +Requires: git-core + + +%description +Ansible-lint checks ansible content for practices and behaviors that could +potentially be improved. 
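To make the `.ansible-lint` settings above more concrete, here is a minimal, hypothetical tasks file (the role name `myrole` and every variable name are invented for illustration and do not come from this repository) that would pass the configured `name[prefix]` (`task_name_prefix: "{stem} | "`), `var-naming` (`var_naming_pattern`) and `loop_var_prefix` checks:

```yaml
# Hypothetical roles/myrole/tasks/main.yml, sketched against the settings above.
---
- name: main | Create application groups # name[prefix]: file stem "main" + " | "
  ansible.builtin.group:
    name: "{{ myrole_group }}" # loop variable reused here, carries the role prefix
    state: present
  loop: "{{ myrole_groups }}" # variable names match ^[a-z_][a-z0-9_]*$
  loop_control:
    loop_var: myrole_group # satisfies loop_var_prefix ^(__|{role}_)
```

Because the prefix is derived from the file stem, a file such as `tasks/install.yml` would need task names starting with `install | ` instead.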
+ +%prep +%autosetup + + +%generate_buildrequires +%pyproject_buildrequires + + +%build +%pyproject_wheel + + +%install +%pyproject_install +%pyproject_save_files ansiblelint + + +%check +# Don't try to import tests that import pytest which isn't available at runtime +%pyproject_check_import -e 'ansiblelint.testing*' -e 'ansiblelint.rules.conftest' +%if %{with check} +%pytest \ + -v \ + --disable-pytest-warnings \ + --numprocesses=auto \ +%if %{with privileged_tests} + tests +%else + tests/unit +%endif +%endif + + +%files -f %{pyproject_files} +%{_bindir}/ansible-lint +%license COPYING docs/licenses/LICENSE.mit.txt +%doc docs/ README.md + +%changelog diff --git a/.config/dictionary.txt b/.config/dictionary.txt new file mode 100644 index 0000000..3a2184b --- /dev/null +++ b/.config/dictionary.txt @@ -0,0 +1,390 @@ +Adrián +Autobuild +CLICOLOR +CODENOTIFY +CODEOWNERS +CTYPE +Cacqueray +Chamoulaud +DISTRO +DOTGLOB +ENVVAR +EPIPE # linux +Fimport +Jython +MYTAG +PYTHONBREAKPOINT +PYTHONIOENCODING +PYTHONPYCACHEPREFIX +REQPASS +RULEDIRS +RUNLEVEL +Renderable +Representer +SIGUSR1 +SRCROOT +Sbarnea +Sorin +Sshell +TOXENV +TYPECHECK +Taskfiles +Tsukinowa +Tóth +WSLENV +aarch64 +abspath +addoption +addopts +ansiblelint +apidoc +apport +argparsing +argspecs +arxcruz +auditd +autobuild +autoclass +autodetected +autodiscovery +autodoc +autofix +autorefs +autoupdate +awcrosby +backports +backticks +bdist +becomeuserabove +bindep +blockincludes +blockinfile +bools +boto +bracketsmatchtest +bracketsmatchtestfile +buildinfo +buildset +builtins +cachier +capfd +caplog +capsys +cffi +chdir +chgrp +chkconfig +chunksize +clib +codeclimate +codecov +codenotify +codeql +codespell +colorama +colsystem +commandline +commitlint +commonmark +compat +conftest +coveragerc +cpus +cpuset +createfile +darglint +dataclasses +dbservers +deannotate +debops +decryptable +delenv +dellemc +denormalize +deps +devel +dharmabumstead +dirhtml +dists +distutils +doas +docstrings +doctest +doctrees +docutils +dotconfig +dotslash +drybjed +dzdo +ematcher +ematchtestfile +envrc +execnet +extlinks +facelessuser +facter +fakerole +fileglob +filelock +filesspot +filetree +fips +firewalld +fontawesome +formatstr +formetting +formsyntax +fqcn +fqrn +fulltoc +fullwidth +gecos +geerlingguy +getmatches +globbing +globmatch +groupname +hostkey +hostnames +hostvars +htmldir +htmlproofer +htpasswd +hwchksum +hwcksum +idempotency +importlib +iniconfig +inlinehilite +insertafter +ipwrap +isclass +iscsi +isdir +isdisjoint +iskeyword +isort +jsonfile +jsonschema +junitxml +keepends +keyserver +konstruktoid +kubernetes +kubevirt +languageservice +letsencrypt +levelname +libbzip +libera +libyaml +lineinfile +linenums +linkcheck +lintable +lintables +literalinclude +localectl +machinectl +magiclink +markdownlint +matchdir +matcherror +matchlines +matchtask +matchtasks +matchvar +matchyaml +maxdepth +minversion +mkdir +mkdocs +mkdocstrings +mkdtemp +mockings +mockreturn +modifyitems +modindex +moduleauthor +mypy +myrole +namedtempfile +nestedincludes +netcommon +nilashishc +nitpicky +nocolor +nodeps +noexist +nomatches +nomatchestest +noqa +norole +nostderr +notest +nxos +octal +octals +opensearch +openshift +outdir +outlen +pageview +parseable +pathex +pathlib +pathspec +pbrun +pfexec +pickleable +pipdeptree +pipefail +piptools +pipx +pkgcache # linux +pkgs +placefolder +pluggy +pluginmanager +podman +prerun +prettierignore +programoutput +psutil +pyargs +pycache +pycharm +pyenv +pygments +pylint +pylintrc +pymdown +pymdownx +pypa +pyparsing +pypi 
+pyproject +pyproject.toml +pypy +pyright +pytest +pyupgrade +pyyaml +redirections +reexec +regexes +releasenotes +relpath +reportversion +representer +reqs +returncode +rmtree +robertdebock +rolepath +roundtrip +ruamel +rulebook +rulebooks +ruledirs +rulesdir +rulesdirs +ruleset +runas +sarif +scalarint +schemafile +sdist +sdists +sectionauthor +seealso +setenv +setuptools +shortdesc +showlocals +shutil +signoff +simpletask +skippable +skiptasks +skiputils +slaveinput +sortfunc +sourcegraph +srpm +ssbarnea +stylesheet +subdir +subelements +subresults +subschema +subschemas +substrs +subtest +superfences +supervisorctl +synchronize +sysvinit +taskfile +taskhandler +taskimports +taskincludes +taskshandlers +templatevars +templating +testinfra +testmon +testns +testpath +testpaths +testproject +testpypi +testrun +timesyncd +tmpfs +toctree +toidentifier +tomli +toolset +tripleo +typehint +typehints +ulimits +uncook +ungrouped +unignored +unimported +unindented +uninstallation +unjinja +unlex +unnormalized +unskippable +unspaced +unsubscriptable +untemplated +userbase +uwsgi +validable +varname +varnotset +varset +varsfile +varstring +varunset +venv +viewcode +virtnet +virtualenv +virtualenvs +wcmatch +webserver +webservers +willthames +workdir +worktree +xdist +xfail +xunit +yatesr +zuul diff --git a/.config/requirements-docs.txt b/.config/requirements-docs.txt new file mode 100644 index 0000000..520919c --- /dev/null +++ b/.config/requirements-docs.txt @@ -0,0 +1,12 @@ +cairosvg +markdown-exec>=1.0.0 +mkdocs-gen-files>=0.4.0 +mkdocs-htmlproofer-plugin +mkdocs-material-extensions>=1.1.1 +mkdocs-material>=9.0.6 +mkdocs>=1.4.2 +mkdocstrings-python>=0.8.3 +mkdocstrings>=0.20.0 +pillow +pipdeptree>=2.3.3 +pymdown-extensions>=9.9.2 diff --git a/.config/requirements-lock.txt b/.config/requirements-lock.txt new file mode 100644 index 0000000..4d06524 --- /dev/null +++ b/.config/requirements-lock.txt @@ -0,0 +1,39 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --no-annotate --output-file=.config/requirements-lock.txt --resolver=backtracking --strip-extras --unsafe-package=ruamel-yaml-clib pyproject.toml +# +ansible-core==2.14.2 ; python_version >= "3.9" +attrs==22.2.0 +black==22.12.0 +bracex==2.3.post1 +cffi==1.15.1 +click==8.1.3 +cryptography==39.0.1 +filelock==3.9.0 +jinja2==3.1.2 +jsonschema==4.17.3 +markdown-it-py==2.1.0 +markupsafe==2.1.1 +mdurl==0.1.2 +mypy-extensions==0.4.3 +packaging==22.0 +pathspec==0.10.3 +platformdirs==2.6.2 +pycparser==2.21 +pygments==2.13.0 +pyrsistent==0.19.3 +pyyaml==6.0 +resolvelib==0.8.1 +rich==13.2.0 +ruamel-yaml==0.17.21 +setuptools==65.6.3 +subprocess-tee==0.4.1 +tomli==2.0.1 +typing-extensions==4.4.0 +wcmatch==8.4.1 +yamllint==1.28.0 + +# The following packages are considered to be unsafe in a requirements file: +# ruamel-yaml-clib diff --git a/.config/requirements-test.txt b/.config/requirements-test.txt new file mode 100644 index 0000000..a89d450 --- /dev/null +++ b/.config/requirements-test.txt @@ -0,0 +1,13 @@ +black # IDE support +coverage-enable-subprocess # see https://github.com/nedbat/coveragepy/issues/1341#issuecomment-1228942657 +coverage[toml] >= 6.4.4 +flake8 # IDE support +flake8-future-annotations # IDE support +mypy # IDE support +psutil # soft-dep of pytest-xdist +pylint # IDE support +pytest >= 7.2.0 +pytest-mock +pytest-plus >= 0.2 # for PYTEST_REQPASS +pytest-xdist >= 2.1.0 +types-pyyaml # IDE support diff --git a/.config/requirements.in b/.config/requirements.in new file mode 
100644 index 0000000..1f34a8b --- /dev/null +++ b/.config/requirements.in @@ -0,0 +1,15 @@ +# Special order section for helping pip: +will-not-work-on-windows-try-from-wsl-instead; platform_system=='Windows' +ansible-core>=2.12.0,<2.14.0; python_version<'3.9' # GPLv3 +ansible-core>=2.12.0; python_version>='3.9' # GPLv3 +# alphabetically sorted: +black>=22.8.0 # MIT +filelock>=3.3.0 # The Unlicense +jsonschema>=4.10.0 # MIT, version needed for improved errors +packaging>=21.3 # Apache-2.0,BSD-2-Clause +pyyaml>=5.4.1 # MIT (centos 9 has 5.3.1) +rich>=12.0.0 # MIT +ruamel.yaml>=0.17.21,<0.18 # MIT, next version is planned to have breaking changes +subprocess-tee>=0.4.1 # MIT, used by ansible-compat +yamllint >= 1.26.3 # GPLv3 +wcmatch>=8.1.2 # MIT diff --git a/.config/requirements.txt b/.config/requirements.txt new file mode 100644 index 0000000..7b85efe --- /dev/null +++ b/.config/requirements.txt @@ -0,0 +1,101 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --extra=docs --extra=test --no-annotate --output-file=.config/requirements.txt --resolver=backtracking --strip-extras --unsafe-package=ruamel-yaml-clib pyproject.toml +# +ansible-core==2.14.2 ; python_version >= "3.9" +astroid==2.14.2 +attrs==22.2.0 +beautifulsoup4==4.11.2 +black==23.1.0 +bracex==2.3.post1 +cairocffi==1.4.0 +cairosvg==2.6.0 +certifi==2022.12.7 +cffi==1.15.1 +charset-normalizer==3.0.1 +click==8.1.3 +colorama==0.4.6 +coverage==7.1.0 +coverage-enable-subprocess==1.0 +cryptography==39.0.1 +cssselect2==0.7.0 +defusedxml==0.7.1 +dill==0.3.6 +exceptiongroup==1.1.0 +execnet==1.9.0 +filelock==3.9.0 +flake8==6.0.0 +flake8-future-annotations==1.1.0 +ghp-import==2.1.0 +griffe==0.25.4 +idna==3.4 +importlib-metadata==6.0.0 +iniconfig==2.0.0 +isort==5.12.0 +jinja2==3.1.2 +jsonschema==4.17.3 +lazy-object-proxy==1.9.0 +markdown==3.3.7 +markdown-exec==1.2.0 +markdown-it-py==2.1.0 +markupsafe==2.1.2 +mccabe==0.7.0 +mdurl==0.1.2 +mergedeep==1.3.4 +mkdocs==1.4.2 +mkdocs-autorefs==0.4.1 +mkdocs-gen-files==0.4.0 +mkdocs-htmlproofer-plugin==0.10.2 +mkdocs-material==9.0.12 +mkdocs-material-extensions==1.1.1 +mkdocstrings==0.20.0 +mkdocstrings-python==0.8.3 +mypy==1.0.0 +mypy-extensions==1.0.0 +packaging==23.0 +pathspec==0.11.0 +pillow==9.4.0 +pipdeptree==2.3.3 +platformdirs==3.0.0 +pluggy==1.0.0 +psutil==5.9.4 +pycodestyle==2.10.0 +pycparser==2.21 +pyflakes==3.0.1 +pygments==2.14.0 +pylint==2.16.1 +pymdown-extensions==9.9.2 +pyrsistent==0.19.3 +pytest==7.2.1 +pytest-mock==3.10.0 +pytest-plus==0.4.0 +pytest-xdist==3.2.0 +python-dateutil==2.8.2 +pyyaml==6.0 +pyyaml-env-tag==0.1 +regex==2022.10.31 +requests==2.28.2 +resolvelib==0.8.1 +rich==13.3.1 +ruamel-yaml==0.17.21 +setuptools==67.2.0 +six==1.16.0 +soupsieve==2.3.2.post1 +subprocess-tee==0.4.1 +tinycss2==1.2.1 +tomli==2.0.1 +tomlkit==0.11.6 +types-pyyaml==6.0.12.6 +typing-extensions==4.4.0 +urllib3==1.26.14 +watchdog==2.2.1 +wcmatch==8.4.1 +webencodings==0.5.1 +wrapt==1.14.1 +yamllint==1.29.0 +zipp==3.13.0 + +# The following packages are considered to be unsafe in a requirements file: +# ruamel-yaml-clib diff --git a/.darglint b/.darglint new file mode 100644 index 0000000..3b2a724 --- /dev/null +++ b/.darglint @@ -0,0 +1,6 @@ +[darglint] +docstring_style=sphinx +ignore=DAR101 +enable=DAR104 + +strictness=long @@ -0,0 +1,85 @@ +# spell-checker:ignore testmon tmontmp +[flake8] + +# Don't even try to analyze these: +extend-exclude = + # No need to traverse egg info dir + *.egg-info, + # GitHub configs + .github, + # Cache files of 
MyPy + .mypy_cache, + # Cache files of pytest + .pytest_cache, + # Temp dir of pytest-testmon + .tmontmp, + # Occasional virtualenv dir + .venv + # VS Code + .vscode, + # Temporary build dir + build, + # This contains sdists and wheels of ansible-lint that we don't want to check + dist, + # Occasional virtualenv dir + env, + # Metadata of `pip wheel` cmd is autogenerated + pip-wheel-metadata, + # vendored code + src/ansiblelint/_vendor/*, + +# Let's not over-complicate the code: +max-complexity = 10 + +# Accessibility/large fonts and PEP8 friendly: +#max-line-length = 79 +# Accessibility/large fonts and PEP8 unfriendly: +max-line-length = 100 + +# The only allowed ignores are related to black and isort +# https://black.readthedocs.io/en/stable/the_black_code_style.html#line-length +# "H" are generated by hacking plugin, which is not black compatible +extend-ignore = + E203, + E501, + # complexity is also measured by pylint: too-many-branches + C901, + # We use type annotations instead + DAR101, + DAR104, + # https://github.com/terrencepreilly/darglint/issues/165 + DAR301, + # duplicate of pylint W0611 (unused-import) + F401, + # duplicate of pylint E0602 (undefined-variable) + F821, + # duplicate of pylint W0612 (unused-variable) + F841, + H, + +# Allow certain violations in certain files: +per-file-ignores = + # FIXME: D102 Missing docstring in public method + src/ansiblelint/cli.py: D102 + src/ansiblelint/formatters/__init__.py: D102 + src/ansiblelint/rules/*.py: D102 + src/ansiblelint/rules/__init__.py: D102 + + # FIXME: C901 Function is too complex + # FIXME: refactor _defaults_from_yamllint_config using match case + # once python 3.10 is mandatory + # Ref: https://github.com/ansible/ansible-lint/pull/2077 + src/ansiblelint/yaml_utils.py: C901 + + # FIXME: drop these once they're fixed + # Ref: https://github.com/ansible/ansible-lint/issues/725 + test/*: D102 + +# flake8-pytest-style +# PT001: +pytest-fixture-no-parentheses = true +# PT006: +pytest-parametrize-names-type = tuple +# PT007: +pytest-parametrize-values-type = tuple +pytest-parametrize-values-row-type = tuple diff --git a/.git_archival.txt b/.git_archival.txt new file mode 100644 index 0000000..8bbfc7d --- /dev/null +++ b/.git_archival.txt @@ -0,0 +1,4 @@ +node: 8805eab95ba1a4e7103507cb90e80877921dbeb3 +node-date: 2023-02-17T11:51:00+00:00 +describe-name: v6.13.1 +ref-names: tag: v6.13.1 diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..2e46433 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,7 @@ +# Force LF line endings for text files +* text=auto eol=lf + +*.png binary + +# Needed for setuptools-scm-git-archive +.git_archival.txt export-subst diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..3d3aa8e --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @ansible/devtools @ansible/ansible-lint-external-contributors diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..0164155 --- /dev/null +++ b/.github/CODE_OF_CONDUCT.md @@ -0,0 +1,3 @@ +# Community Code of Conduct + +Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html). 
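For context on the `.config/dictionary.txt` word list shown earlier: it is the kind of custom dictionary consumed by the cspell spell-check hook configured in `.pre-commit-config.yaml` later in this diff. The actual cspell configuration file is not part of this excerpt, so the snippet below is only an assumed sketch of how such a word list is typically registered (the dictionary name `project-words` is invented):

```yaml
# Hypothetical cspell.config.yaml; the project's real cspell configuration may differ.
version: "0.2"
dictionaryDefinitions:
  - name: project-words # invented name for the custom word list
    path: .config/dictionary.txt
    addWords: true # allow new words to be appended to this file
dictionaries:
  - project-words # enable the custom dictionary globally
```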
diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md new file mode 100644 index 0000000..490de20 --- /dev/null +++ b/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,33 @@ +# Issue Type + +- Bug report +- Feature request + +# Ansible and Ansible Lint details + +``` +ansible --version +ansible-lint --version +``` + +- ansible installation method: one of source, pip, OS package +- ansible-lint installation method: one of source, pip, OS package + +# Desired Behavior + +Please give some details of the feature being requested +or what should happen if providing a bug report + +Possible security bugs should be reported via email to `security@ansible.com` + +# Actual Behavior (Bug report only) + +Please give some details of what is actually happening. +Include a [minimum complete verifiable example] with: + +- playbook +- output of running ansible-lint +- if you're getting a stack trace, output of + `ansible-playbook --syntax-check playbook` + +[minimum complete verifiable example]: http://stackoverflow.com/help/mcve diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..041a61a --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,70 @@ +--- +name: Bug report +about: > + Create a bug report. Ensure that it does reproduce on the main branch with + python >=3.9. For anything else, please use the discussion link below. +labels: bug, new +--- + +<!--- Verify first that your issue is not already reported on GitHub --> +<!--- Also test if the latest release and main branch are affected too --> + +##### Summary + +<!--- Explain the problem briefly below --> + +##### Issue Type + +- Bug Report + +##### OS / ENVIRONMENT + +<!--- Paste verbatim output between triple backticks --> + +```console (paste below) +ansible-lint --version +``` + +<!--- Provide all relevant information below, e.g. target OS versions, network + device firmware, etc. --> + +- ansible installation method: one of source, pip, OS package +- ansible-lint installation method: one of source, pip, OS package + +##### STEPS TO REPRODUCE + +<!--- Describe exactly how to reproduce the problem, using a minimal test case --> + +<!--- Paste example playbooks or commands between triple backticks below --> + +```console (paste below) + +``` + +<!--- HINT: You can paste gist.github.com links for larger files --> + +##### Desired Behavior + +<!--- Describe what you expected to happen when running the steps above --> + +Possible security bugs should be reported via email to `security@ansible.com` + +##### Actual Behavior + +<!--- Describe what happened. If possible run with extra verbosity (-vvvv) --> + +Please give some details of what is happening. 
+Include a [minimum complete verifiable example] with: + +- minimized playbook to reproduce the error +- the output of running ansible-lint including the command line used +- if you're getting a stack trace, also the output of + `ansible-playbook --syntax-check playbook` + +<!--- Paste verbatim command output between triple backticks --> + +```paste below + +``` + +[minimum complete verifiable example]: http://stackoverflow.com/help/mcve diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..b7218f7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,23 @@ +--- +# Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser +blank_issues_enabled: false # default is true +contact_links: + - name: Feature requests + url: https://github.com/ansible/ansible-lint/discussions/categories/ideas + about: Suggest an idea for this project + - name: Discussions + url: https://github.com/ansible/ansible-lint/discussions/ + about: Any kind of questions should go on the forum. + - name: Security bug report + url: https://docs.ansible.com/ansible/latest/community/reporting_bugs_and_features.html + about: | + Please learn how to report security vulnerabilities here. + + For all security related bugs, email security@ansible.com + instead of using this issue tracker and you will receive + a prompt response. + + For more information, see https://docs.ansible.com/ansible/latest/community/reporting_bugs_and_features.html + - name: Ansible Code of Conduct + url: https://docs.ansible.com/ansible/latest/community/code_of_conduct.html + about: Be nice to other members of the community. Behave. diff --git a/.github/SECURITY.md b/.github/SECURITY.md new file mode 100644 index 0000000..f94d78a --- /dev/null +++ b/.github/SECURITY.md @@ -0,0 +1,14 @@ +# Security Policy + +## Supported Versions + +Ansible applies security fixes according to the 3-versions-back support +policy. Please find more information in [our docs]. + +## Reporting a Vulnerability + +We encourage responsible disclosure practices for security +vulnerabilities. Please read our [policies for reporting bugs](https://docs.ansible.com/ansible/devel/community/reporting_bugs_and_features.html#reporting-a-bug) +if you want to report a security issue that might affect Ansible. 
+ +[our docs]: https://docs.ansible.com/ansible-core/devel/reference_appendices/release_and_maintenance.html#ansible-core-release-cycle diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..6a4dae2 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,19 @@ +--- +version: 2 +updates: + - package-ecosystem: pip + directory: /.config/ + schedule: + day: sunday + interval: weekly + labels: + - dependabot-deps-updates + - skip-changelog + versioning-strategy: lockfile-only + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: daily + labels: + - "dependencies" + - "skip-changelog" diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml new file mode 100644 index 0000000..11fa614 --- /dev/null +++ b/.github/release-drafter.yml @@ -0,0 +1,3 @@ +--- +# see https://github.com/ansible/devtools +_extends: ansible/devtools diff --git a/.github/workflows/ack.yml b/.github/workflows/ack.yml new file mode 100644 index 0000000..5e7b9f5 --- /dev/null +++ b/.github/workflows/ack.yml @@ -0,0 +1,10 @@ +--- +# See https://github.com/ansible/devtools/blob/main/.github/workflows/ack.yml +name: ack +on: + pull_request_target: + types: [opened, labeled, unlabeled, synchronize] + +jobs: + ack: + uses: ansible/devtools/.github/workflows/ack.yml@main diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml new file mode 100644 index 0000000..1a01af8 --- /dev/null +++ b/.github/workflows/push.yml @@ -0,0 +1,13 @@ +--- +# See https://github.com/ansible/devtools/blob/main/.github/workflows/push.yml +name: push +on: + push: + branches: + - main + - "releases/**" + - "stable/**" + +jobs: + ack: + uses: ansible/devtools/.github/workflows/push.yml@main diff --git a/.github/workflows/redirects.yml b/.github/workflows/redirects.yml new file mode 100644 index 0000000..28a9a26 --- /dev/null +++ b/.github/workflows/redirects.yml @@ -0,0 +1,33 @@ +--- +# Sync RTD redirects +name: redirects + +on: + push: + branches: + - main + paths: + - docs/redirects.yml + - .github/workflows/redirects.yml + + # Manually triggered using GitHub's UI + workflow_dispatch: + +jobs: + docs: + environment: release + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + + - name: Upgrade Python toolchain + run: python3 -m pip install --upgrade pip setuptools wheel + + - name: Install readthedocs-cli + run: python3 -m pip install readthedocs-cli + + - name: Sync redirects + run: rtd projects ansible-lint redirects sync -f docs/redirects.yml --wet-run + env: + RTD_TOKEN: ${{ secrets.RTD_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..111f15f --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,47 @@ +--- +# cspell:ignore mislav +name: release + +on: + release: + types: [published] + +jobs: + pypi: + name: Publish to PyPI registry + environment: release + runs-on: ubuntu-22.04 + + env: + FORCE_COLOR: 1 + PY_COLORS: 1 + TOXENV: pkg + + steps: + - name: Switch to using Python 3.9 by default + uses: actions/setup-python@v4 + with: + python-version: 3.9 + - name: Install tox + run: python3 -m pip install --user "tox>=4.0.0" + - name: Check out src from Git + uses: actions/checkout@v3 + with: + fetch-depth: 0 # needed by setuptools-scm + submodules: true + - name: Build dists + run: python -m tox + - name: Publish to pypi.org + if: >- # "create" workflows run separately from "push" & "pull_request" + github.event_name == 'release' + uses: 
pypa/gh-action-pypi-publish@release/v1 + with: + password: ${{ secrets.pypi_password }} + + - name: Bump homebrew formula + uses: mislav/bump-homebrew-formula-action@v2 + with: + # A PR will be sent to github.com/Homebrew/homebrew-core to update this formula: + formula-name: ansible-lint + env: + COMMITTER_TOKEN: ${{ secrets.COMMITTER_TOKEN }} diff --git a/.github/workflows/tox.yml b/.github/workflows/tox.yml new file mode 100644 index 0000000..4fe6da7 --- /dev/null +++ b/.github/workflows/tox.yml @@ -0,0 +1,233 @@ +--- +name: tox + +on: + create: # is used for publishing to PyPI and TestPyPI + tags: # any tag regardless of its name, no branches + - "**" + push: # only publishes pushes to the main branch to TestPyPI + branches: # any integration branch but not tag + - "main" + pull_request: + branches: + - "main" + release: + types: + - published # It seems that you can publish directly without creating + schedule: + - cron: 1 0 * * * # Run daily at 0:01 UTC + # Run every Friday at 18:02 UTC + # https://crontab.guru/#2_18_*_*_5 + # - cron: 2 18 * * 5 + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +env: + FORCE_COLOR: 1 # tox, pytest, ansible-lint + PY_COLORS: 1 + +jobs: + pre: + name: pre + runs-on: ubuntu-22.04 + outputs: + matrix: ${{ steps.generate_matrix.outputs.matrix }} + steps: + - name: Determine matrix + id: generate_matrix + uses: coactions/dynamic-matrix@v1 + with: + min_python: "3.9" + max_python: "3.11" + other_names: | + lint + pkg,hook,docs + schemas + eco + py-devel + platforms: linux,macos + + build: + name: ${{ matrix.name }} + runs-on: ${{ matrix.os || 'ubuntu-22.04' }} + needs: pre + defaults: + run: + shell: ${{ matrix.shell || 'bash'}} + strategy: + fail-fast: false + matrix: ${{ fromJson(needs.pre.outputs.matrix) }} + # max-parallel: 5 + # The matrix testing goal is to cover the *most likely* environments + # which are expected to be used by users in production. Avoid adding a + # combination unless there are good reasons to test it, like having + # proof that we failed to catch a bug by not running it. Using + # distribution should be preferred instead of custom builds. + env: + # vars safe to be passed to wsl: + WSLENV: FORCE_COLOR:PYTEST_REQPASS:TOXENV:GITHUB_STEP_SUMMARY + # Number of expected test passes, safety measure for accidental skip of + # tests. Update value if you add/remove tests. 
+ PYTEST_REQPASS: 791 + + steps: + - name: Activate WSL1 + if: "contains(matrix.shell, 'wsl')" + uses: Vampire/setup-wsl@v2 + + - name: MacOS workaround for https://github.com/actions/virtual-environments/issues/1187 + if: ${{ matrix.os == 'macOS-latest' }} + run: | + sudo sysctl -w net.link.generic.system.hwcksum_tx=0 + sudo sysctl -w net.link.generic.system.hwcksum_rx=0 + + - uses: actions/checkout@v3 + with: + fetch-depth: 0 # needed by setuptools-scm + submodules: true + + - name: Set pre-commit cache + uses: actions/cache@v3 + if: ${{ matrix.passed_name == 'lint' }} + with: + path: | + ~/.cache/pre-commit + key: pre-commit-${{ matrix.name || matrix.passed_name }}-${{ hashFiles('.pre-commit-config.yaml') }} + + - name: Set galaxy cache + uses: actions/cache@v3 + if: ${{ startsWith(matrix.passed_name, 'py') }} + with: + path: | + examples/playbooks/collections/*.tar.gz + examples/playbooks/collections/ansible_collections + key: galaxy-${{ hashFiles('examples/playbooks/collections/requirements.yml') }} + + - name: Set up Python ${{ matrix.python_version || '3.9' }} + if: "!contains(matrix.shell, 'wsl')" + uses: actions/setup-python@v4 + with: + cache: pip + python-version: ${{ matrix.python_version || '3.9' }} + + - uses: actions/setup-node@v3 + with: + node-version: 18 + cache: "npm" + cache-dependency-path: test/schemas/package-lock.json + + - name: Run ./tools/test-setup.sh + run: ./tools/test-setup.sh + + - name: Install tox + run: | + python3 -m pip install --upgrade pip + python3 -m pip install --upgrade "tox>=4.0.0" + + - name: Log installed dists + run: python3 -m pip freeze --all + + - name: Initialize tox envs ${{ matrix.passed_name }} + run: python3 -m tox --notest --skip-missing-interpreters false -vv -e ${{ matrix.passed_name }} + timeout-minutes: 5 # average is under 1, but macos can be over 3 + + # sequential run improves browsing experience (almost no speed impact) + - name: tox -e ${{ matrix.passed_name }} + run: python3 -m tox -e ${{ matrix.passed_name }} + + - name: Combine coverage data + if: ${{ startsWith(matrix.passed_name, 'py') }} + # produce a single .coverage file at repo root + run: tox -e coverage + + - name: Upload coverage data + if: ${{ startsWith(matrix.passed_name, 'py') }} + uses: codecov/codecov-action@v3 + with: + name: ${{ matrix.passed_name }} + fail_ci_if_error: false # see https://github.com/codecov/codecov-action/issues/598 + token: ${{ secrets.CODECOV_TOKEN }} + verbose: true # optional (default = false) + + - name: Archive logs + uses: actions/upload-artifact@v3 + with: + name: logs.zip + path: .tox/**/log/ + # https://github.com/actions/upload-artifact/issues/123 + continue-on-error: true + + - name: Report failure if git reports dirty status + run: | + git checkout HEAD -- src/ansiblelint/schemas/__store__.json + if [[ -n $(git status -s) ]]; then + # shellcheck disable=SC2016 + echo -n '::error file=git-status::' + printf '### Failed as git reported modified and/or untracked files\n```\n%s\n```\n' "$(git status -s)" | tee -a "$GITHUB_STEP_SUMMARY" + exit 99 + fi + # https://github.com/actions/toolkit/issues/193 + codeql: + name: codeql + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: ["python"] + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. 
+ - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{matrix.language}}" + + check: # This job does nothing and is only used for the branch protection + if: always() + permissions: + pull-requests: write # allow codenotify to comment on pull-request + + needs: + - build + + runs-on: ubuntu-latest + + steps: + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@release/v1 + with: + jobs: ${{ toJSON(needs) }} + + - name: Check out src from Git + uses: actions/checkout@v3 + + - name: Notify repository owners about lint change affecting them + uses: sourcegraph/codenotify@v0.6.4 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # https://github.com/sourcegraph/codenotify/issues/19 + continue-on-error: true diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e082ca2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,71 @@ +# Byte-compiled / optimized / DLL files +__pycache__ +*.py[co] +*$py.class + +# Packages +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib64/ +parts/ +pip-wheel-metadata +sdist/ +var/ +venv/ +*.egg-info/ +.installed.cfg +*.egg + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.tox + +# Needed for CLI tests +.sandbox + +# pyenv +.python-version + +# Coverage artifacts +.coverage +coverage*.xml +pip-wheel-metadata +.test-results/ + +# mypy +.mypy_cache + +# .cache is used by progressive mode +.cache + +# Generated by setuptools-scm +src/ansiblelint/_version.py + +# Unformatted fixtures, need to be added with force as we need to exclude +# them from being reformatted (.prettierignore is a symlink to .gitignore) +test/fixtures/formatting-before/ +# prettier should not edit this due to forcibly extra-long lines +examples/playbooks/vars/strings.transformed.yml + +# other +.DS_Store +.vscode +.idea +src/ansiblelint/_version.py +*.tar.gz +.pytest_cache +test/eco/CODENOTIFY.html +test/eco +test/schemas/node_modules +.envrc +collections +site +_readthedocs diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..19cc5f8 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule ".projects/ansible-compat"] + path = .projects/ansible-compat + url = https://github.com/ansible/ansible-compat diff --git a/.packit.yaml b/.packit.yaml new file mode 100644 index 0000000..206b6b1 --- /dev/null +++ b/.packit.yaml @@ -0,0 +1,44 @@ +--- +# spell-checker:ignore packit specfile copr epel +specfile_path: dist/ansible-lint.spec +actions: + create-archive: + # packit.dev service does have these module pre-installed: + - python3 -m build --sdist --outdir dist + - sh -c "ls dist/ansible-lint-*.tar.gz" + get-current-version: + - ./tools/get-version.sh + post-upstream-clone: + - "git submodule init" + - "git submodule update" + - ./tools/update-version.sh 
+srpm_build_deps: + - python3-build + - python3-setuptools_scm +jobs: + - job: copr_build + targets: + - fedora-rawhide-aarch64 # one on PR should be enough + trigger: pull_request + - job: copr_build + trigger: commit + branch: main + targets: + # See https://packit.dev/docs/configuration/#aliases + # API to get available targets: https://api.dev.testing-farm.io/v0.1/composes/public + - fedora-rawhide-x86_64 + - fedora-rawhide-aarch64 + - fedora-37-x86_64 + - fedora-37-aarch64 + # Missing python3-build see https://bugzilla.redhat.com/show_bug.cgi?id=2129071 + # - centos-stream-9-aarch64 + # - centos-stream-9-x86_64 + # - job: tests + # trigger: pull_request + # metadata: + # targets: + # - fedora-all + # - job: propose_downstream + # trigger: release + # metadata: + # dist-git-branch: master diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..71de0c4 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,243 @@ +--- +ci: + # format compatible with commitlint + autoupdate_commit_msg: "chore: pre-commit autoupdate" + autoupdate_schedule: monthly + autofix_commit_msg: | + chore: auto fixes from pre-commit.com hooks + + for more information, see https://pre-commit.ci + skip: + # https://github.com/pre-commit-ci/issues/issues/55 + - pip-compile + - schemas + submodules: true +exclude: > + (?x)^( + .config/requirements.*| + .vscode/extensions.json| + .vscode/settings.json| + examples/broken/encoding.j2| + test/schemas/data/licenses.json| + test/schemas/package-lock.json| + test/schemas/negative_test| + examples/broken/yaml-with-tabs/invalid-due-tabs.yaml| + examples/playbooks/collections/.* + )$ +repos: + - repo: meta + hooks: + - id: check-useless-excludes + - repo: https://github.com/pre-commit/mirrors-prettier + # keep it before yamllint + rev: v3.0.0-alpha.4 + hooks: + - id: prettier + # Temporary excludes so we can gradually normalize the formatting + exclude: > + (?x)^( + .*\.md$| + examples/other/some.j2.yaml| + examples/playbooks/example.yml| + examples/playbooks/multiline-brackets.*| + examples/playbooks/templates/not-valid.yaml| + examples/playbooks/with-umlaut-.*| + examples/playbooks/with-skip-tag-id.yml| + examples/yamllint/.*| + examples/playbooks/collections/.*| + test/fixtures/formatting-before/.*| + test/schemas/data/.*| + test/schemas/(negative_test|test)/.*\.md| + src/ansiblelint/schemas/(molecule|tasks|playbook).json| + src/ansiblelint/schemas/ansible-navigator-config.json + )$ + always_run: true + additional_dependencies: + - prettier + - prettier-plugin-toml + - prettier-plugin-sort-json + - repo: https://github.com/streetsidesoftware/cspell-cli + rev: v6.22.0 + hooks: + - id: cspell + # entry: codespell --relative + args: [--relative, --no-progress, --no-summary] + name: Spell check with cspell + - repo: https://github.com/sirosen/check-jsonschema + rev: 0.21.0 + hooks: + - id: check-github-workflows + - repo: https://github.com/pre-commit/pre-commit-hooks.git + rev: v4.4.0 + hooks: + - id: end-of-file-fixer + # ignore formatting-prettier to have an accurate prettier comparison + exclude: > + (?x)^( + test/eco/.*.result| + examples/yamllint/.*| + test/fixtures/formatting-before/.*| + test/fixtures/formatting-prettier/.* + )$ + - id: trailing-whitespace + exclude: > + (?x)^( + examples/playbooks/(with-skip-tag-id|unicode).yml| + examples/playbooks/example.yml| + examples/yamllint/.*| + test/eco/.*.result| + test/fixtures/formatting-before/.* + )$ + - id: mixed-line-ending + - id: fix-byte-order-marker + - id: 
check-executables-have-shebangs + - id: check-merge-conflict + - id: debug-statements + language_version: python3 + - repo: https://github.com/codespell-project/codespell + rev: v2.2.2 + hooks: + - id: codespell + exclude: > + (?x)^( + .config/dictionary.txt| + examples/broken/encoding.j2| + test/schemas/negative_test/.*| + test/schemas/test/.*| + src/ansiblelint/schemas/.*\.json + )$ + additional_dependencies: + - tomli + - repo: https://github.com/adrienverge/yamllint.git + rev: v1.29.0 + hooks: + - id: yamllint + exclude: > + (?x)^( + examples/playbooks/templates/.*| + examples/yamllint/.*| + examples/other/some.j2.yaml| + examples/playbooks/collections/.*| + test/fixtures/formatting-before/.* + )$ + files: \.(yaml|yml)$ + types: [file, yaml] + entry: yamllint --strict + - repo: https://github.com/PyCQA/isort + rev: 5.12.0 + hooks: + - id: isort + args: + # https://github.com/pre-commit/mirrors-isort/issues/9#issuecomment-624404082 + - --filter-files + - repo: https://github.com/psf/black + rev: 23.1.0 + hooks: + - id: black + language_version: python3 + - repo: https://github.com/pycqa/flake8.git + rev: 6.0.0 + hooks: + - id: flake8 + language_version: python3 + additional_dependencies: + - flake8-2020>=1.6.0 + # - flake8-black>=0.1.1 + - flake8-docstrings>=1.5.0 + - flake8-print>=5.0 + - flake8-pytest-style>=1.2.2 + - flake8-future-annotations>=0.0.3 + - repo: https://github.com/asottile/pyupgrade + # keep it after flake8 + rev: v3.3.1 + hooks: + - id: pyupgrade + args: ["--py38-plus"] + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.0.0 + hooks: + - id: mypy + # empty args needed in order to match mypy cli behavior + args: [--strict] + additional_dependencies: + - ansible-compat + - black>=22.10.0 + - cryptography>=39.0.1 + - filelock + - jinja2 + - pytest>=7.2.0 + - rich>=13.2.0 + - ruamel.yaml + - subprocess-tee + - types-PyYAML + - types-jsonschema>=4.4.2 + - types-pkg_resources + - types-setuptools + - wcmatch + exclude: > + (?x)^( + test/local-content/.*| + plugins/.* + )$ + - repo: https://github.com/pycqa/pylint + rev: v2.16.1 + hooks: + - id: pylint + args: + - --output-format=colorized + additional_dependencies: + - ansible-compat + - ansible-core>=2.14.0 + - black>=22.10.0 + - docutils + - filelock + - jsonschema>=4.9.0 + - pytest>=7.2.0 + - pyyaml + - rich>=13.2.0 + - ruamel.yaml + - typing_extensions + - wcmatch + - yamllint + - repo: https://github.com/jazzband/pip-tools + rev: 6.12.2 + hooks: + - id: pip-compile + name: lock + always_run: true + entry: pip-compile --resolver=backtracking -q --no-annotate --output-file=.config/requirements-lock.txt pyproject.toml --strip-extras --unsafe-package ruamel-yaml-clib + language: python + files: ^.config\/requirements.*$ + alias: lock + stages: [manual] + language_version: "3.9" # minimal we support officially + additional_dependencies: + - pip>=22.3.1 + - id: pip-compile + name: deps + entry: pip-compile --resolver=backtracking -q --no-annotate --output-file=.config/requirements.txt pyproject.toml --extra docs --extra test --strip-extras --unsafe-package ruamel-yaml-clib + language: python + files: ^.config\/requirements.*$ + alias: deps + language_version: "3.9" # minimal we support officially + always_run: true + additional_dependencies: + - pip>=22.3.1 + - id: pip-compile + entry: pip-compile --resolver=backtracking -q --no-annotate --output-file=.config/requirements.txt pyproject.toml --extra docs --extra test --strip-extras --unsafe-package ruamel-yaml-clib --upgrade + language: python + always_run: true + 
files: ^.config\/requirements.*$ + alias: up + stages: [manual] + language_version: "3.9" # minimal we support officially + additional_dependencies: + - pip>=22.3.1 + - # keep at bottom as these are slower + repo: local + hooks: + - id: schemas + name: update json schemas + entry: python3 src/ansiblelint/schemas/__main__.py + language: python + stages: [manual] diff --git a/.pre-commit-hooks.yaml b/.pre-commit-hooks.yaml new file mode 100644 index 0000000..19ff947 --- /dev/null +++ b/.pre-commit-hooks.yaml @@ -0,0 +1,20 @@ +--- +# For use with pre-commit. +# See usage instructions at http://pre-commit.com + +- id: ansible-lint + name: Ansible-lint + description: This hook runs ansible-lint. + entry: python3 -m ansiblelint -v --force-color + language: python + # do not pass files to ansible-lint, see: + # https://github.com/ansible/ansible-lint/issues/611 + pass_filenames: false + always_run: true + additional_dependencies: + # https://github.com/pre-commit/pre-commit/issues/1526 + # If you want to use specific version of ansible-core or ansible, feel + # free to override `additional_dependencies` in your own hook config + # file. + - . + - ansible-core>=2.13.3 diff --git a/.prettierignore b/.prettierignore new file mode 120000 index 0000000..3e4e48b --- /dev/null +++ b/.prettierignore @@ -0,0 +1 @@ +.gitignore
\ No newline at end of file diff --git a/.prettierrc.yaml b/.prettierrc.yaml new file mode 100644 index 0000000..906d3d6 --- /dev/null +++ b/.prettierrc.yaml @@ -0,0 +1,18 @@ +--- +proseWrap: always +jsonRecursiveSort: true # prettier-plugin-sort-json +tabWidth: 2 +useTabs: false +overrides: + - files: + - "*.md" + options: + # compatibility with markdownlint + proseWrap: always + printWidth: 80 + - files: + - "*.yaml" + - "*.yml" + options: + # compatibility with yamllint + proseWrap: preserve diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 0000000..8262c4e --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,26 @@ +--- +version: 2 + +mkdocs: + fail_on_warning: true + configuration: mkdocs.yml + +build: + os: ubuntu-22.04 + tools: + python: "3.11" + commands: + - pip install --user tox + - python3 -m tox -e docs -- --strict --site-dir=_readthedocs/html/ +python: + system_packages: false + install: + - method: pip + path: tox + - method: pip + path: . + extra_requirements: + - docs +submodules: + include: all + recursive: true diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 0000000..50f3e17 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,18 @@ +{ + "recommendations": [ + "Tyriar.sort-lines", + "esbenp.prettier-vscode", + "hbenl.vscode-test-explorer", + "ms-python.isort", + "ms-python.python", + "ms-python.vscode-pylance", + "ms-vscode.live-server", + "redhat.ansible", + "redhat.vscode-yaml", + "ryanluker.vscode-coverage-gutters", + "shardulm94.trailing-spaces", + "tamasfe.even-better-toml", + "timonwong.shellcheck", + "znck.grammarly", + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..90d62ac --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,45 @@ +{ + "[markdown]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "editor.formatOnSave": true, + "evenBetterToml.formatter.allowedBlankLines": 2, + "files.exclude": { + "*.egg-info": true, + ".pytest_cache": true, + ".tox": true, + "__pycache__": true, + "build": true + }, + "git.ignoreLimitWarning": true, + "grammarly.domain": "technical", + "grammarly.files.include": [ + "**/*.txt", + "**/*.md" + ], + "grammarly.hideUnavailablePremiumAlerts": true, + "grammarly.showExamples": true, + "python.analysis.exclude": [ + "build" + ], + "python.formatting.provider": "black", + "python.linting.flake8Args": [ + "--ignore=E501,W503" + ], + "python.linting.flake8Enabled": true, + "python.linting.mypyCategorySeverity.error": "Warning", + "python.linting.mypyEnabled": true, + "python.linting.pylintEnabled": true, + "python.terminal.activateEnvironment": true, + "python.testing.pytestEnabled": true, + "python.testing.unittestEnabled": false, + "sortLines.filterBlankLines": true, + "yaml.completion": true, + "yaml.customTags": [ + "!encrypted/pkcs1-oaep scalar", + "!vault scalar" + ], + "yaml.format.enable": false, + "yaml.validate": true, + "evenBetterToml.formatter.alignComments": false +} diff --git a/.yamllint b/.yamllint new file mode 100644 index 0000000..59c2fa5 --- /dev/null +++ b/.yamllint @@ -0,0 +1,22 @@ +--- +rules: + comments: + # prettier compatibility + min-spaces-from-content: 1 + document-start: + present: true + indentation: + level: error + indent-sequences: consistent + octal-values: + forbid-implicit-octal: true + forbid-explicit-octal: true +ignore: | + .tox + examples/playbooks/example.yml + examples/playbooks/syntax-error-string.yml + examples/playbooks/vars/not_decryptable.yml + 
test/schemas/negative_test/**/*.yml + test/schemas/test/**/*.yml + +# ignore added because this file includes on-purpose errors @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/> + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. 
+States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/licenses/why-not-lgpl.html>. diff --git a/DCO_1_1.md b/DCO_1_1.md new file mode 100644 index 0000000..0c7f40a --- /dev/null +++ b/DCO_1_1.md @@ -0,0 +1,44 @@ +# DCO + +All contributors must use `git commit --signoff` for any +commit to be merged, and agree that usage of --signoff constitutes +agreement with the terms of DCO 1.1, which appears below: + +``` +Developer Certificate of Origin +Version 1.1 + +Copyright (C) 2004, 2006 The Linux Foundation and its contributors. +1 Letterman Drive +Suite D4700 +San Francisco, CA, 94129 + +Everyone is permitted to copy and distribute verbatim copies of this +license document, but changing it is not allowed. 
+ +Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +(a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +(b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +(c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +(d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. +``` diff --git a/README.md b/README.md new file mode 100644 index 0000000..563fa25 --- /dev/null +++ b/README.md @@ -0,0 +1,39 @@ +[![PyPI version](https://img.shields.io/pypi/v/ansible-lint.svg)](https://pypi.org/project/ansible-lint) +[![Ansible-lint rules explanation](https://img.shields.io/badge/Ansible--lint-rules-blue.svg)](https://ansible-lint.readthedocs.io/rules/) +[![Discussions](https://img.shields.io/badge/Discussions-gray.svg)](https://github.com/ansible/ansible-lint/discussions) +[![GitHub Actions CI/CD](https://github.com/ansible/ansible-lint/workflows/gh/badge.svg)](https://github.com/ansible/ansible-lint/actions?query=workflow%3Agh+branch%3Amain+event%3Apush) +[![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/ansible/ansible-lint.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/ansible-community/ansible-lint/context:python) +[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit) + +# Ansible-lint + +`ansible-lint` checks playbooks for practices and behavior that could +potentially be improved. As a community backed project ansible-lint supports +only the last two major versions of Ansible. + +[Visit the Ansible Lint docs site](https://ansible-lint.readthedocs.io/) + +# Contributing + +Please read [Contribution guidelines] if you wish to contribute. + +# Licensing + +The ansible-lint project is distributed as [GPLv3] due to use of [GPLv3] runtime +dependencies, like `ansible` and `yamllint`. + +For historical reasons, its own code-base remains licensed under a more +liberal [MIT] license and any contributions made are accepted as being made +under original [MIT] license. + +# Authors + +ansible-lint was created by [Will Thames] and is now maintained as part of the +[Ansible] by [Red Hat] project. 
+ +[ansible]: https://ansible.com +[contribution guidelines]: https://ansible-lint.readthedocs.io/contributing +[gplv3]: https://github.com/ansible/ansible-lint/blob/main/COPYING +[mit]: https://github.com/ansible/ansible-lint/blob/main/docs/licenses/LICENSE.mit.txt +[red hat]: https://redhat.com +[will thames]: https://github.com/willthames diff --git a/ansible.cfg b/ansible.cfg new file mode 100644 index 0000000..b341954 --- /dev/null +++ b/ansible.cfg @@ -0,0 +1,2 @@ +[defaults] +collections_path = examples/playbooks/collections diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000..fa66b52 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,10 @@ +--- +codecov: + require_ci_to_pass: true +comment: false +coverage: + status: + patch: false + project: + default: + threshold: 0.5% diff --git a/conftest.py b/conftest.py new file mode 100644 index 0000000..c50923c --- /dev/null +++ b/conftest.py @@ -0,0 +1,51 @@ +"""PyTest Fixtures.""" +import importlib +import os +import subprocess +import sys + +# Ensure we always run from the root of the repository +if os.getcwd() != os.path.dirname(__file__): + os.chdir(os.path.dirname(__file__)) + +# checking if user is running pytest without installing test dependencies: +missing = [] +for module in ["ansible", "black", "flake8", "mypy", "pylint"]: + if not importlib.util.find_spec(module): + missing.append(module) +if missing: + print( + f"FATAL: Missing modules: {', '.join(missing)} -- probably you missed installing test requirements with: pip install -e '.[test]'", + file=sys.stderr, + ) + sys.exit(1) +# we need to be sure that we have the requirements installed as some tests +# might depend on these. This approach is compatible with GHA caching. +try: + subprocess.check_output( + ["./tools/install-reqs.sh"], + stderr=subprocess.PIPE, + text=True, + ) +except subprocess.CalledProcessError as exc: + print(f"{exc}\n{exc.stderr}\n{exc.stdout}", file=sys.stderr) + sys.exit(1) + +# flake8: noqa: E402 +from ansible.module_utils.common.yaml import ( # pylint: disable=wrong-import-position + HAS_LIBYAML, +) + +if not HAS_LIBYAML and sys.version_info >= (3, 9, 0): + # While presence of libyaml is not required for runtime, we keep this error + # fatal here in order to be sure that we spot libyaml errors during testing. + # + # For 3.8.x we do not do this check, as libyaml does not have an arm64 build for py38. + print( + "FATAL: For testing, we require pyyaml to be installed with its native extension, missing it would make testing 3x slower and risk missing essential bugs.", + file=sys.stderr, + ) + sys.exit(1) + + +os.environ["NO_COLOR"] = "1" diff --git a/cspell.config.yaml b/cspell.config.yaml new file mode 100644 index 0000000..fce0237 --- /dev/null +++ b/cspell.config.yaml @@ -0,0 +1,22 @@ +--- +dictionaryDefinitions: + - name: words + path: .config/dictionary.txt + addWords: true +dictionaries: + # Use `cspell-cli trace word` to check where a work is defined + - en_US + - bash + - words + - python +ignorePaths: + - cspell.config.yaml + # The requirements file + - .config/requirements.txt + - docs/requirements.txt + - docs/requirements.in + # Test fixtures generated from outside + - test/**/*.result + - src/ansiblelint/schemas/*.json + # Other + - "*.svg" diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 0000000..22695c6 --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,16 @@ +# Old compiled python stuff +*.py[co] +# package building stuff +build +# Emacs backup files... 
+*~ +.\#* +.doctrees +# Generated docs stuff +ansible*.xml +.buildinfo +objects.inv +.doctrees +*.min.css +_build +rst_warnings diff --git a/docs/.nojekyll b/docs/.nojekyll new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/docs/.nojekyll diff --git a/docs/_static/ansible-lint.svg b/docs/_static/ansible-lint.svg new file mode 100644 index 0000000..eb6054a --- /dev/null +++ b/docs/_static/ansible-lint.svg @@ -0,0 +1,6 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"> +<svg width="100%" height="100%" viewBox="0 0 16 16" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;"> + <path d="M6,2L6,2.984L5.911,2.984C5.711,2.984 5.524,3.024 5.348,3.105C5.175,3.183 5.022,3.289 4.891,3.424C4.764,3.555 4.661,3.71 4.582,3.892C4.508,4.072 4.471,4.26 4.471,4.459C4.471,4.687 4.475,4.913 4.483,5.137C4.491,5.361 4.491,5.583 4.483,5.802C4.479,6.02 4.461,6.233 4.428,6.443C4.395,6.652 4.339,6.853 4.26,7.045C4.181,7.238 4.073,7.423 3.936,7.597C3.823,7.741 3.685,7.875 3.523,8C3.685,8.125 3.823,8.259 3.936,8.403C4.073,8.577 4.181,8.762 4.26,8.955C4.339,9.148 4.395,9.35 4.428,9.563C4.461,9.769 4.479,9.982 4.483,10.204C4.491,10.42 4.491,10.641 4.483,10.869C4.475,11.089 4.471,11.313 4.471,11.541C4.471,11.744 4.508,11.935 4.582,12.114C4.661,12.291 4.764,12.447 4.891,12.582C5.022,12.713 5.174,12.819 5.348,12.901C5.523,12.978 5.711,13.016 5.911,13.016L6,13.016L6,14L5.911,14C5.597,14 5.294,13.938 5.001,13.815C4.708,13.692 4.449,13.515 4.226,13.285C4.003,13.056 3.838,12.805 3.733,12.532L3.732,12.531C3.634,12.265 3.568,11.99 3.535,11.707L3.535,11.706C3.507,11.422 3.499,11.132 3.511,10.836C3.523,10.546 3.529,10.257 3.529,9.967C3.529,9.769 3.49,9.58 3.412,9.401L3.411,9.399C3.337,9.219 3.234,9.064 3.104,8.934L3.102,8.932C2.976,8.798 2.825,8.692 2.647,8.614L2.646,8.613C2.47,8.532 2.285,8.492 2.089,8.492L2,8.492L2,7.508L2.089,7.508C2.285,7.508 2.471,7.47 2.647,7.393C2.825,7.311 2.976,7.204 3.103,7.074C3.234,6.939 3.337,6.783 3.412,6.606L3.412,6.605C3.49,6.426 3.529,6.236 3.529,6.033C3.529,5.743 3.523,5.454 3.511,5.164C3.499,4.868 3.507,4.581 3.535,4.301L3.535,4.299C3.568,4.012 3.634,3.735 3.732,3.469L3.733,3.468C3.838,3.195 4.003,2.944 4.226,2.715C4.449,2.485 4.708,2.308 5.001,2.185C5.294,2.062 5.597,2 5.911,2L6,2ZM13.923,8.492L14,8.492L14,7.508L13.911,7.508C13.715,7.508 13.53,7.468 13.354,7.387L13.353,7.386C13.175,7.308 13.024,7.202 12.898,7.068L12.896,7.066C12.766,6.936 12.663,6.781 12.589,6.601L12.588,6.599C12.51,6.42 12.471,6.231 12.471,6.033C12.471,5.743 12.477,5.454 12.489,5.164C12.501,4.868 12.493,4.578 12.465,4.295L12.465,4.293C12.432,4.01 12.366,3.735 12.268,3.469L12.267,3.468C12.162,3.195 11.997,2.944 11.774,2.715C11.551,2.485 11.292,2.308 10.999,2.185C10.706,2.062 10.403,2 10.089,2L10,2L10,2.984L10.089,2.984C10.289,2.984 10.477,3.022 10.652,3.099C10.826,3.181 10.978,3.287 11.109,3.418C11.236,3.553 11.339,3.709 11.418,3.886C11.492,4.065 11.529,4.256 11.529,4.459C11.529,4.687 11.525,4.911 11.517,5.131C11.509,5.359 11.509,5.58 11.517,5.796C11.521,6.018 11.539,6.231 11.572,6.437C11.605,6.65 11.661,6.852 11.74,7.045C11.819,7.238 11.927,7.423 12.064,7.597C12.177,7.741 12.315,7.875 12.477,8C12.467,8.008 12.456,8.016 12.446,8.025C12.976,8.083 13.476,8.246 13.923,8.492Z" style="fill:rgb(128,128,128);"/> + <path 
d="M12,9C11.407,9 10.827,9.176 10.333,9.506C9.84,9.835 9.455,10.304 9.228,10.852C9.001,11.4 8.942,12.003 9.058,12.585C9.173,13.167 9.459,13.702 9.879,14.121C10.298,14.541 10.833,14.827 11.415,14.942C11.997,15.058 12.6,14.999 13.148,14.772C13.696,14.545 14.165,14.16 14.494,13.667C14.824,13.173 15,12.593 15,12C14.999,11.205 14.683,10.442 14.12,9.88C13.558,9.317 12.795,9.001 12,9Z" style="fill:rgb(128,128,128);fill-rule:nonzero;"/> +</svg> diff --git a/docs/_static/images/ansible-lint-512.png b/docs/_static/images/ansible-lint-512.png Binary files differnew file mode 100644 index 0000000..30ed574 --- /dev/null +++ b/docs/_static/images/ansible-lint-512.png diff --git a/docs/_static/images/favicon.ico b/docs/_static/images/favicon.ico Binary files differnew file mode 100644 index 0000000..23ad7da --- /dev/null +++ b/docs/_static/images/favicon.ico diff --git a/docs/_static/images/logo_invert.png b/docs/_static/images/logo_invert.png Binary files differnew file mode 100644 index 0000000..49e0897 --- /dev/null +++ b/docs/_static/images/logo_invert.png diff --git a/docs/_static/theme_overrides.css b/docs/_static/theme_overrides.css new file mode 100644 index 0000000..b6b5d99 --- /dev/null +++ b/docs/_static/theme_overrides.css @@ -0,0 +1,24 @@ +/* table width fix via: https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html */ + +/* override table width restrictions */ +@media screen and (min-width: 767px) { + .wy-table-responsive table td { + /* !important prevents the common CSS stylesheets from overriding + * this as on RTD they are loaded after this stylesheet */ + white-space: normal !important; + } + + .wy-table-responsive { + overflow: visible !important; + } +} + +.icon-home { + display: none !important; +} + +/* Avoid using the red text for tt blocks */ +.rst-content code.literal, +.rst-content tt.literal { + color: #888888 !important; +} diff --git a/docs/configuring.md b/docs/configuring.md new file mode 100644 index 0000000..4210e9e --- /dev/null +++ b/docs/configuring.md @@ -0,0 +1,72 @@ +# Configuration + +Customize how Ansible-lint runs against automation content to suit your needs. +You can ignore certain rules, enable `opt-in` rules, and control various other +settings. + +Ansible-lint loads configuration from a file in the current working directory or +from a file that you specify in the command line. If you provide configuration +on both via a config file and on the command line, list values are merged (for +example `exclude_paths`) and **True** is preferred for boolean values like +`quiet`. + +## Using local configuration files + +Specify Ansible-lint configuration in either `.ansible-lint` or +`.config/ansible-lint.yml` in your current working directory. + +!!! note + + If Ansible-lint cannot find a configuration file in the current directory it attempts to locate it in a parent directory. + However Ansible-lint does not try to load configuration that is outside the git repository. + +## Specifying configuration files + +Use the `-c <filename>` CLI flag with command line invocations of Ansible-lint, +for example: + +```bash +ansible-lint -c path/to/ansible-lint-dev.yml +``` + +## Ansible-lint configuration + +The following values are supported, and function identically to their CLI +counterparts: + +```yaml +--8<-- ".ansible-lint" +``` + +## Ignoring rules for entire files + +Ansible-lint will load skip rules from `.ansible-lint-ignore` file that is +adjacent to its config file. The file format is very simple, containing the +filename and the rule to be ignored. 
It also supports comments starting with
+`#`.
+
+```yaml title=".ansible-lint-ignore"
+# this is just a comment
+playbook.yml package-latest # disable package-latest rule for playbook.yml
+playbook.yml deprecated-module
+```
+
+The file can also be created by adding `--generate-ignore` to the command line.
+Keep in mind that this will override any existing file content.
+
+## Pre-commit setup
+
+To use Ansible-lint with [pre-commit], add the following to the
+`.pre-commit-config.yaml` file in your local repository.
+
+Change **rev:** to either a commit SHA or tag of Ansible-lint that contains
+`.pre-commit-hooks.yaml`.
+
+```yaml
+- repo: https://github.com/ansible/ansible-lint
+  rev: ... # put latest release tag from https://github.com/ansible/ansible-lint/releases/
+  hooks:
+    - id: ansible-lint
+```
+
+[pre-commit]: https://pre-commit.com/
diff --git a/docs/contributing.md b/docs/contributing.md
new file mode 100644
index 0000000..d2e964c
--- /dev/null
+++ b/docs/contributing.md
@@ -0,0 +1,123 @@
+# Contributing to Ansible-lint
+
+To contribute to ansible-lint, please use pull requests on a branch of your own
+fork.
+
+After [creating your fork on GitHub], you can do:
+
+```shell-session
+$ git clone git@github.com:your-name/ansible-lint
+$ cd ansible-lint
+$ git checkout -b your-branch-name
+# DO SOME CODING HERE
+$ git add your new files
+$ git commit -v
+$ git push origin your-branch-name
+```
+
+You will then be able to create a pull request from your commit.
+
+All fixes to core functionality (i.e. anything except docs or examples) should
+be accompanied by tests that fail prior to your change and succeed afterwards.
+
+Feel free to raise issues in the repo if you feel unable to contribute a code
+fix.
+
+## Standards
+
+ansible-lint is flake8 compliant with **max-line-length** set to 100.
+
+ansible-lint works only with the [supported ansible versions] at the time of
+its release.
+
+Automated tests will be run against all PRs for flake8 compliance and Ansible
+compatibility. To check before pushing commits, just use
+[tox](https://tox.wiki/en/latest/).
+
+% DO-NOT-REMOVE-deps-snippet-PLACEHOLDER
+
+## Talk to us
+
+Use the GitHub [discussions] forum or, for a live chat experience, try the
+`#ansible-devtools` IRC channel on libera.chat or the Matrix room
+[#devtools:ansible.com](https://matrix.to/#/#devtools:ansible.com).
+
+For the full list of Ansible IRC channels and mailing lists, please see the
+[Ansible Communication] page. Release announcements will be made to the
+[Ansible Announce] list.
+
+Possible security bugs should be reported via email to
+<mailto:security@ansible.com>.
+
+## Code of Conduct
+
+As with all Ansible projects, we have a [Code of Conduct].
+
+[.flake8]: https://github.com/ansible/ansible-lint/blob/main/.flake8
+[ansible announce]: https://groups.google.com/forum/#!forum/ansible-announce
+[ansible communication]:
+  https://docs.ansible.com/ansible/latest/community/communication.html
+[code of conduct]:
+  https://docs.ansible.com/ansible/latest/community/code_of_conduct.html
+[creating your fork on github]:
+  https://docs.github.com/en/get-started/quickstart/contributing-to-projects
+[discussions]: https://github.com/ansible/ansible-lint/discussions
+[supported ansible versions]:
+  https://docs.ansible.com/ansible-core/devel/reference_appendices/release_and_maintenance.html#ansible-core-release-cycle
+[tox]: https://tox.readthedocs.io
+
+## Module dependency graph
+
+Extra care should be taken when considering adding any dependency.
Removing most
+dependencies on Ansible internals is desired as these can change without any
+warning.
+
+```bash exec="1" source="console"
+_PIP_USE_IMPORTLIB_METADATA=0 pipdeptree -p ansible-lint
+```
+
+## Adding a new rule
+
+Writing a new rule is as easy as adding a single new rule, one that combines
+**implementation, testing and documentation**.
+
+One good example is [MetaTagValidRule], which can easily be copied in order to
+create a new rule by following the steps below:
+
+- Use a short but clear class name, which must match the filename.
+- Pick an unused `id`; the first number is used to determine the rule section.
+  Look at the [rules](rules/index.md) page and see which section fits your new
+  rule best.
+- Include the `experimental` tag. Any new rule must stay experimental for at
+  least two weeks, until this tag is removed in the next major release.
+- Update all class-level variables.
+- Implement the linting methods needed by your rule; these are the ones whose
+  names start with the **match** prefix. Implement only those you need. For the
+  moment you will need to look at how similar rules were implemented to figure
+  out what to do.
+- Update the tests. Every rule must have at least one test and likely also a
+  negative match one.
+- If the rule is task-specific, it may be best to include a test to verify its
+  use inside blocks as well.
+- Optionally run only the rule-specific tests with a command like:
+  {command}`tox -e py38-core -- -k NewRule`
+- Run {command}`tox` in order to run all ansible-lint tests. Adding a new rule
+  can break some other tests. Update them if needed.
+- Run {command}`ansible-lint -L` and check that the rule description renders
+  correctly.
+- Build the docs using {command}`tox -e docs` and check that the new rule is
+  displayed correctly in them.
+
+[metatagvalidrule]:
+  https://github.com/ansible/ansible-lint/blob/main/src/ansiblelint/rules/meta_no_tags.py
+
+## Documentation changes
+
+To build the docs, run `tox -e docs`. At the end of the build, you will see the
+local location of your built docs.
+
+Building docs locally may not be identical to CI/CD builds. We recommend
+creating a draft PR and checking the RTD PR preview page too.
+
+If you do not want to learn the reStructuredText format, you can also
+[file an issue](https://github.com/ansible/ansible-lint/issues), and let us know
+how we can improve our documentation.
diff --git a/docs/custom-rules.md b/docs/custom-rules.md
new file mode 100644
index 0000000..8c57d90
--- /dev/null
+++ b/docs/custom-rules.md
@@ -0,0 +1,120 @@
+# Custom linting rules
+
+Define and use your own sets of rules with Ansible-lint.
+
+## Rule definitions
+
+You define each custom rule in a unique Python class file. Default rules are
+named _DeprecatedVariableRule.py_, etc.
+
+Each rule should have a short description as a Python docstring wrapped in
+triple quotes `"""` immediately after the class name. The short description
+should be brief and meaningfully explain the purpose of the rule to users.
+
+Each rule definition should have the following parts:
+
+- `id` provides a unique identifier to the rule.
+- `description` explains what the rule checks for.
+- `tags` specifies one or more tags for including or excluding the rule.
+
+### Match and matchtask methods
+
+Each rule definition should also invoke one of the following methods:
+
+- `match` takes a line and returns:
+  - None or False if the line does not match the test.
+  - True or a custom message if the line does match the test.
(This allows one
+    rule to test multiple behaviors - see e.g. the
+    _CommandsInsteadOfModulesRule_.)
+- `matchtask` operates on a single task or handler, such that tasks get
+  standardized to always contain a _module_ key and _module_arguments_ key.
+  Other common task modifiers, such as _when_, _with_items_, etc., are also
+  available as keys if present in the task.
+
+The following is an example rule that uses the `match` method:
+
+```python
+from typing import Union
+
+from ansiblelint.rules import AnsibleLintRule
+
+class DeprecatedVariableRule(AnsibleLintRule):
+    """Deprecated variable declarations."""
+
+    id = 'EXAMPLE002'
+    description = 'Check for lines that have old style ${var} ' + \
+                  'declarations'
+    tags = {'deprecations'}
+
+    def match(self, line: str) -> Union[bool, str]:
+        return '${' in line
+```
+
+The following is an example rule that uses the `matchtask` method:
+
+```python
+from typing import TYPE_CHECKING, Any, Dict, Optional, Union
+
+import ansiblelint.utils
+from ansiblelint.rules import AnsibleLintRule
+
+if TYPE_CHECKING:
+    from ansiblelint.file_utils import Lintable
+
+class TaskHasTag(AnsibleLintRule):
+    """Tasks must have a tag."""
+
+    id = 'EXAMPLE001'
+    description = 'Tasks must have a tag'
+    tags = ['productivity']
+
+    def matchtask(self, task: Dict[str, Any], file: Optional['Lintable'] = None) -> Union[bool, str]:
+        # If the task includes another task or makes the playbook fail,
+        # don't force it to have a tag
+        if not set(task.keys()).isdisjoint(['include', 'fail']):
+            return False
+
+        # Tasks should have tags
+        if 'tags' not in task:
+            return True
+
+        return False
+```
+
+The task argument to `matchtask` contains a number of keys; the critical one is
+_action_. The value of `task['action']` contains the module being used, and the
+arguments passed, both as key-value pairs and a list of other arguments (e.g.
+the command used with shell).
+
+In ansible-lint 2.0.0, `task['action']['args']` was renamed
+`task['action']['module_arguments']` to avoid a clash when a module actually
+takes `args` as a parameter key (e.g. `ec2_tag`).
+
+In ansible-lint 3.0.0, `task['action']['module']` was renamed
+`task['action']['__ansible_module__']` to avoid a clash when a module takes
+`module` as an argument. As a precaution, `task['action']['module_arguments']`
+was renamed `task['action']['__ansible_arguments__']`.
+
+## Packaging custom rules
+
+Ansible-lint automatically loads and enables custom rules in Python packages
+from the _custom_ subdirectory. This subdirectory is part of the Ansible-lint
+installation directory, for example:
+
+`/usr/lib/python3.8/site-packages/ansiblelint/rules/custom/`
+
+To automatically load custom rules, do the following:
+
+1. Package your custom rules as a Python package with a descriptive name.
+
+2. Configure the \[options\] section of the `setup.cfg` of your custom rules
+   Python package as in the following example:
+
+   ```ini
+   [options]
+   packages =
+       ansiblelint.rules.custom.<your_custom_rules_subdir>
+   package_dir =
+       ansiblelint.rules.custom.<your_custom_rules_subdir> = <your_rules_source_code_subdir>
+   ```
+
+3. Install the Python package into
+   `<ansible_lint_custom_rules_dir>/custom/<your_custom_rules_subdir>/`.
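+For orientation, here is a sketch of what such a packaged rules project might
+look like, together with a quick way to confirm that Ansible-lint discovered
+the rules after installation. The package, directory, and rule names below are
+hypothetical placeholders rather than part of Ansible-lint; only the
+`ansible-lint -L` command, which lists all loaded rules, comes from the
+documentation above.
+
+```bash
+# Hypothetical source layout matching the setup.cfg example above
+# (all names are illustrative only):
+#
+#   my_rules_pkg/
+#     setup.cfg            # declares ansiblelint.rules.custom.my_rules
+#     rules/               # <your_rules_source_code_subdir>
+#       MyCustomRule.py    # one rule class per file
+#
+# After installing the package into the custom rules directory, list all
+# loaded rules and check that the new rule ids appear in the output:
+ansible-lint -L
+```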
diff --git a/docs/images/favicon.ico b/docs/images/favicon.ico Binary files differnew file mode 100644 index 0000000..ea4ebc1 --- /dev/null +++ b/docs/images/favicon.ico diff --git a/docs/images/logo.png b/docs/images/logo.png Binary files differnew file mode 100644 index 0000000..f3626b0 --- /dev/null +++ b/docs/images/logo.png diff --git a/docs/images/logo.svg b/docs/images/logo.svg new file mode 100644 index 0000000..ffe210b --- /dev/null +++ b/docs/images/logo.svg @@ -0,0 +1,7 @@ +<?xml version="1.0" encoding="UTF-8" standalone="no"?> +<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"> +<svg width="100%" height="100%" viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;"> + <g id="ansible-aap"> + <path d="M128,0C198.645,0 256,57.355 256,128C256,198.645 198.645,256 128,256C57.355,256 0,198.645 0,128C0,57.355 57.355,0 128,0ZM189.094,178.686L138.002,55.958C136.786,52.714 133.677,50.416 130.163,50.551C126.513,50.416 123.269,52.714 122.053,55.958L66.23,190.446L85.423,190.446L107.455,135.029L173.55,188.418C175.442,190.31 178.011,191.392 180.579,191.527C185.58,191.662 189.77,187.742 189.905,182.606L189.905,182.336C189.77,181.119 189.499,179.903 189.094,178.686ZM130.298,78.125L163.413,159.899L113.402,120.431L130.298,78.125Z" style="fill:white;"/> + </g> +</svg> diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000..df877c6 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,28 @@ +# Ansible Lint Documentation + +## About Ansible Lint + +Ansible Lint is a command-line tool for linting **playbooks, roles and +collections** aimed toward any Ansible users. Its main goal is to promote proven +practices, patterns and behaviors while avoiding common pitfalls that can easily +lead to bugs or make code harder to maintain. + +Ansible lint is also supposed to help users upgrade their code to work with +newer versions of Ansible. Due to this reason we recommend using it with the +newest version of Ansible, even if the version used in production may be older. + +As any other linter, it is opinionated. Still, its rules are the result of +community contributions and they can always be disabled based individually or by +category by each user. + +[Ansible Galaxy project](https://github.com/ansible/galaxy/) makes use of this +linter to compute quality scores for [Galaxy Hub](https://galaxy.ansible.com/) +contributed content. This does not mean this tool only targets those that want +to share their code. Files like `galaxy.yml`, or sections like `galaxy_info` +inside `meta.yml` help with documentation and maintenance, even for unpublished +roles or collections. + +The project was originally started by +[@willthames](https://github.com/willthames/) and has since been adopted by the +Ansible Community team. Its development is purely community driven while keeping +permanent communications with other Ansible teams. diff --git a/docs/installing.md b/docs/installing.md new file mode 100644 index 0000000..026b580 --- /dev/null +++ b/docs/installing.md @@ -0,0 +1,80 @@ +# Installing + +Install Ansible-lint to apply rules and follow best practices with your +automation content. + +!!! note + + Ansible-lint does not currently support installation on Windows systems. + +!!! 
warning + + Ansible-lint does not support any installation methods that are not mentioned in + this document. Before raising any bugs related to installation, review all of + the following details: + + - You should use installation methods outlined in this document only. + - You should upgrade the Python installer (`pip` or `pipx`) to the latest + version available from pypi.org. If you used a system package manager, you + will need to upgrade the installer to a newer version. + - If you are installing from a git zip archive, which is not supported but + should work, ensure you use the main branch and the latest version of pip and + setuptools. + - If you are installing Ansible-lint within a container or system package, you + should not report the issue here. Contact the relevant container or package + provider instead. + - If you are using [poetry](https://python-poetry.org/), read this + [discussion](https://github.com/ansible/ansible-lint/discussions/2820#discussioncomment-4400380). + + Pull requests to improve installation instructions are welcome. Any new issues + related to installation will be closed and locked. + +For a container image, we recommend using +[creator-ee](https://github.com/ansible/creator-ee/), which includes +Ansible-lint. If you have a use case that the `creator-ee` container doesn't +satisfy, please contact the team through the +[discussions](https://github.com/ansible/ansible-lint/discussions) forum. + +You can also run Ansible-lint on your source code with the +[Ansible-lint GitHub action](https://github.com/marketplace/actions/ansible-lint) +instead of installing it directly. + +## Installing the latest version + +You can install the most recent version of Ansible-lint with the [pip3] or +[pipx] Python package manager. Use [pipx] to isolate Ansible-lint from your +current Python environment as an alternative to creating a virtual environment. + +```bash +# This also installs ansible-core if it is not already installed +pip3 install ansible-lint +``` + +## Installing on Fedora and RHEL + +You can install Ansible-lint on Fedora, or Red Hat Enterprise Linux (RHEL) with +the `dnf` package manager. + +```bash +dnf install ansible-lint +``` + +!!! note + + On RHEL, `ansible-lint` package is part of "Red Hat Ansible Automation + Platform" subscription, which needs to be activated. + +## Installing from source code + +**Note**: `pip>=22.3.1` is required for installation from the source repository. +Please consult the [PyPA User Guide] to learn more about managing Pip versions. 
+
+```bash
+pip3 install git+https://github.com/ansible/ansible-lint
+```
+
+[installing_from_source]: https://pypi.org/project/pip/
+[pip3]: https://pypi.org/project/pip/
+[pipx]: https://pypa.github.io/pipx/
+[pypa user guide]:
+  https://packaging.python.org/en/latest/tutorials/installing-packages/#ensure-pip-setuptools-and-wheel-are-up-to-date
diff --git a/docs/licenses/LICENSE.mit.txt b/docs/licenses/LICENSE.mit.txt new file mode 100644 index 0000000..4b48ba2 --- /dev/null +++ b/docs/licenses/LICENSE.mit.txt @@ -0,0 +1,21 @@
+Copyright (c) 2013-2018 Will Thames <will@thames.id.au>
+Copyright (c) 2018 Ansible by Red Hat
+
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/docs/philosophy.md b/docs/philosophy.md new file mode 100644 index 0000000..2ef698d --- /dev/null +++ b/docs/philosophy.md @@ -0,0 +1,109 @@
+# Philosophy of ansible-lint
+
+Ansible **playbooks, roles, and collections** should read like documentation, be
+production ready, unambiguous, and provide consistent results.
+
+`Ansible-lint` should be considered a trusted advisor, helping Ansible content
+creators write and package high-quality Ansible content. While not all rules may
+be applicable in all situations, they should be followed whenever possible.
+
+The goal of `ansible-lint` is to ensure that content created by different people
+has a similar look and feel. This makes the adoption and use of Ansible content
+easier in the community and enterprise. By keeping the number of configurable
+features at a minimum, consistent outcomes between authors can be achieved.
+
+## History and the future
+
+`ansible-lint` is almost a decade old, and its current list of rules is the
+result of a collaboration between many people. The tool originated as a
+community project and is currently part of the Ansible Galaxy submission and
+validation process.
+
+In the future, it will be an official component of the Red Hat Ansible
+Automation Platform, used during the collections certification process and the
+recommended Ansible content linter for Red Hat customers.
+
+Starting in 2022, additional rules will be added that help content creators
+ready their content for production use. It is through the use of ansible-lint
+and these rules that developers can have confidence that their playbooks,
+roles, and task files are easy to understand and produce consistent results when
+run against anything, from servers in a home lab to mission-critical systems in
+the cloud.
+
+## Style and formatting
+
+The focus of Ansible content creators should be on automation, outcomes and
+readability, rather than style or formatting. This is why we follow the same
+concepts as other code formatting tools like
+[black](https://github.com/psf/black) and [prettier](https://prettier.io/).
+
+Adoption of `ansible-lint` will save time by keeping reviews focused on the
+quality of the content rather than on the nuances of formatting and style.
+
+As code formatting is not an art, we can save your project time and effort by
+applying a standardized code style and formatting.
+
+## Q&A
+
+### Why does ansible-lint not accept all valid ansible syntax?
+
+`ansible-core` continues to mature while maintaining backward compatibility with
+early versions. `ansible-lint` was never intended to support the whole range of
+historical Ansible language syntax variations, but instead only the best of it.
+
+It supports a broad vocabulary of keywords and styles. Over time, changes in the
+language have led to an improved experience for authors and consumers of Ansible
+content. The rules in `ansible-lint` suggest the use of these patterns.
+
+It is these usage patterns that are written as rules in `ansible-lint`, leading
+to improved readability of **playbooks, roles**, and **collections**. The linter
+will always be more restrictive and opinionated regarding what it accepts; this
+is part of its design. We are not forced to keep the same backward compatibility
+level as Ansible, so we can tell people to avoid specific syntax for various
+reasons, such as being deprecated, unsafe, or hard to maintain.
+
+Based on the extensive history of `ansible-lint` and user feedback, it notifies
+you about discouraged practices, sometimes before `ansible-core` starts doing
+so.
+
+### What if I do not agree with a specific rule?
+
+We recognize that some projects will find at least one rule that might not suit
+their needs. Use the `skip_list` feature to temporarily bypass that rule until
+you have time to update your Ansible content.
+
+### Who decides which best practices get adopted in ansible-lint?
+
+The main source of new ideas was and remains our community. Before proposing a
+change, check with a few other Ansible users who work on different projects and
+see if they find it useful or not.
+
+It is better to get enough relevant feedback on our discussion forum before
+starting to implement new rules. If the proposed rule appears popular and does
+not conflict with existing rules, a core maintainer will tell you that the
+proposed rule can be added to ansible-lint, so you can start working on it
+without fear of rejection.
+
+The core team decides how a new rule is added. Usually, rules are
+added as experimental (warnings only) or even as opt-ins, and are made implicit
+only when a major version is released.
+
+### Do I need to pass all rules to get my collection certified?
+
+Not really. The certification process is likely to use only a subset of rules.
+At this time, we are working on building that list.
+
+### Why do many official Ansible docs examples not pass linting?
+
+Most of the official examples are written to exemplify specific features, and
+some might conflict with our rules. Still, we plan to include linting of
+official examples in the future and add specific exclusions where needed, making
+it more likely that a copy/paste from the docs will not raise a long list of
+linter violations.
+
+### Why does ansible-lint require an Ansible version newer than what I use in production? 
+
+Use `ansible-lint` as a **static analysis** tool for your content. You can run
+it with a version of Ansible that is different from what you use in production.
+This helps you prepare your content for the future, so don't be afraid to use
+it in this way.
diff --git a/docs/pkg/.gitignore b/docs/pkg/.gitignore new file mode 100644 index 0000000..d6b7ef3 --- /dev/null +++ b/docs/pkg/.gitignore @@ -0,0 +1,2 @@
+*
+!.gitignore
diff --git a/docs/profiles.md b/docs/profiles.md new file mode 100644 index 0000000..815c62a --- /dev/null +++ b/docs/profiles.md @@ -0,0 +1,112 @@
+<!---
+Do not manually edit, generated from generate_docs.py
+-->
+
+# Profiles
+
+Ansible-lint profiles gradually increase the strictness of rules as your Ansible
+content moves through its lifecycle. To configure the linter to use a specific
+profile, read [applying-profiles][].
+
+!!! note
+
+    Rules with a `*` suffix are not yet implemented but are documented with linked GitHub issues.
+
+## min
+
+The `min` profile ensures that Ansible can load content. Rules in this profile
+are mandatory because they prevent fatal errors. You can add files to the
+exclude list or provide dependencies to load the correct files.
+
+- [internal-error](rules/internal-error.md)
+- [load-failure](rules/load-failure.md)
+- [parser-error](rules/parser-error.md)
+- [syntax-check](rules/syntax-check.md)
+
+## basic
+
+The `basic` profile prevents common coding issues and enforces standard styles
+and formatting. It extends [min](#min) profile.
+
+- [command-instead-of-module](rules/command-instead-of-module.md)
+- [command-instead-of-shell](rules/command-instead-of-shell.md)
+- [deprecated-bare-vars](rules/deprecated-bare-vars.md)
+- [deprecated-command-syntax](rules/deprecated-command-syntax.md)
+- [deprecated-local-action](rules/deprecated-local-action.md)
+- [deprecated-module](rules/deprecated-module.md)
+- [inline-env-var](rules/inline-env-var.md)
+- [key-order](rules/key-order.md)
+- [literal-compare](rules/literal-compare.md)
+- [jinja](rules/jinja.md)
+- [no-jinja-when](rules/no-jinja-when.md)
+- [no-tabs](rules/no-tabs.md)
+- [partial-become](rules/partial-become.md)
+- [playbook-extension](rules/playbook-extension.md)
+- [role-name](rules/role-name.md)
+- [schema](rules/schema.md)
+- [name](rules/name.md)
+- [var-naming](rules/var-naming.md)
+- [yaml](rules/yaml.md)
+
+## moderate
+
+The `moderate` profile ensures that content adheres to best practices for making
+content easier to read and maintain. It extends [basic](#basic) profile.
+
+- [name[template]](rules/name.md)
+- [name[imperative]](https://github.com/ansible/ansible-lint/issues/2170)
+- [name[casing]](rules/name.md)
+- [no-free-form](https://github.com/ansible/ansible-lint/issues/2117)
+- [spell-var-name](https://github.com/ansible/ansible-lint/issues/2168)
+
+## safety
+
+The `safety` profile avoids module calls that can have non-deterministic outcomes
+or security concerns. It extends [moderate](#moderate) profile.
+
+- [avoid-implicit](rules/avoid-implicit.md)
+- [latest](rules/latest.md)
+- [package-latest](rules/package-latest.md)
+- [risky-file-permissions](rules/risky-file-permissions.md)
+- [risky-octal](rules/risky-octal.md)
+- [risky-shell-pipe](rules/risky-shell-pipe.md)
+
+## shared
+
+The `shared` profile ensures that content follows best practices for packaging
+and publishing. 
This profile is intended for content creators who want to make +Ansible playbooks, roles, or collections available from +[galaxy.ansible.com](https://galaxy.ansible.com/), +[automation-hub](https://console.redhat.com/ansible/automation-hub), or a +private instance. It extends [safety](#safety) profile. + +- [galaxy](rules/galaxy.md) +- [ignore-errors](rules/ignore-errors.md) +- [layout](https://github.com/ansible/ansible-lint/issues/1900) +- [meta-incorrect](rules/meta-incorrect.md) +- [meta-no-info](rules/meta-no-info.md) +- [meta-no-tags](rules/meta-no-tags.md) +- [meta-video-links](rules/meta-video-links.md) +- [meta-version](https://github.com/ansible/ansible-lint/issues/2103) +- [meta-runtime](rules/meta-runtime.md) +- [no-changed-when](rules/no-changed-when.md) +- [no-handler](rules/no-handler.md) +- [no-relative-paths](rules/no-relative-paths.md) +- [max-block-depth](https://github.com/ansible/ansible-lint/issues/2173) +- [max-tasks](https://github.com/ansible/ansible-lint/issues/2172) +- [unsafe-loop](https://github.com/ansible/ansible-lint/issues/2038) + +## production + +The `production` profile ensures that content meets requirements for inclusion +in +[Ansible Automation Platform (AAP)](https://www.redhat.com/en/technologies/management/ansible) +as validated or certified content. It extends [shared](#shared) profile. + +- [avoid-dot-notation](https://github.com/ansible/ansible-lint/issues/2174) +- [disallowed-ignore](https://github.com/ansible/ansible-lint/issues/2121) +- [fqcn](rules/fqcn.md) +- [import-task-no-when](https://github.com/ansible/ansible-lint/issues/2219) +- [meta-no-dependencies](https://github.com/ansible/ansible-lint/issues/2159) +- [single-entry-point](https://github.com/ansible/ansible-lint/issues/2242) +- [use-loop](https://github.com/ansible/ansible-lint/issues/2204) diff --git a/docs/redirects.yml b/docs/redirects.yml new file mode 100644 index 0000000..cda53ee --- /dev/null +++ b/docs/redirects.yml @@ -0,0 +1,16 @@ +# Authoritative list of redirects we have configured in RTD, +# https://pypi.org/project/readthedocs-cli/ +--- +- type: page + from_url: /rules/git-latest/ + to_url: /rules/latest/ +- type: page + from_url: /rules/hg-latest/ + to_url: /rules/latest/ +- type: page + from_url: /default_rules/index/ + to_url: /rules/ +# Keep this last or it will match all the other rules +- type: sphinx_htmldir + from_url: "" + to_url: "" diff --git a/docs/rules/args.md b/docs/rules/args.md new file mode 120000 index 0000000..194c2d3 --- /dev/null +++ b/docs/rules/args.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/args.md
\ No newline at end of file diff --git a/docs/rules/avoid-implicit.md b/docs/rules/avoid-implicit.md new file mode 120000 index 0000000..4ddfc82 --- /dev/null +++ b/docs/rules/avoid-implicit.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/avoid_implicit.md
\ No newline at end of file diff --git a/docs/rules/command-instead-of-module.md b/docs/rules/command-instead-of-module.md new file mode 120000 index 0000000..9f6809d --- /dev/null +++ b/docs/rules/command-instead-of-module.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/command_instead_of_module.md
\ No newline at end of file diff --git a/docs/rules/command-instead-of-shell.md b/docs/rules/command-instead-of-shell.md new file mode 120000 index 0000000..2bf0747 --- /dev/null +++ b/docs/rules/command-instead-of-shell.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/command_instead_of_shell.md
\ No newline at end of file diff --git a/docs/rules/deprecated-bare-vars.md b/docs/rules/deprecated-bare-vars.md new file mode 120000 index 0000000..80ca8f7 --- /dev/null +++ b/docs/rules/deprecated-bare-vars.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/deprecated_bare_vars.md
\ No newline at end of file diff --git a/docs/rules/deprecated-command-syntax.md b/docs/rules/deprecated-command-syntax.md new file mode 120000 index 0000000..70cb557 --- /dev/null +++ b/docs/rules/deprecated-command-syntax.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/deprecated_command_syntax.md
\ No newline at end of file diff --git a/docs/rules/deprecated-local-action.md b/docs/rules/deprecated-local-action.md new file mode 120000 index 0000000..fd44cd8 --- /dev/null +++ b/docs/rules/deprecated-local-action.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/deprecated_local_action.md
\ No newline at end of file diff --git a/docs/rules/deprecated-module.md b/docs/rules/deprecated-module.md new file mode 120000 index 0000000..28dfe7f --- /dev/null +++ b/docs/rules/deprecated-module.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/deprecated_module.md
\ No newline at end of file diff --git a/docs/rules/empty-string-compare.md b/docs/rules/empty-string-compare.md new file mode 120000 index 0000000..3470d52 --- /dev/null +++ b/docs/rules/empty-string-compare.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/empty_string_compare.md
\ No newline at end of file diff --git a/docs/rules/fqcn.md b/docs/rules/fqcn.md new file mode 120000 index 0000000..5623fae --- /dev/null +++ b/docs/rules/fqcn.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/fqcn.md
\ No newline at end of file diff --git a/docs/rules/galaxy.md b/docs/rules/galaxy.md new file mode 120000 index 0000000..02cebd0 --- /dev/null +++ b/docs/rules/galaxy.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/galaxy.md
\ No newline at end of file diff --git a/docs/rules/ignore-errors.md b/docs/rules/ignore-errors.md new file mode 120000 index 0000000..ea5578d --- /dev/null +++ b/docs/rules/ignore-errors.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/ignore_errors.md
\ No newline at end of file diff --git a/docs/rules/index.md b/docs/rules/index.md new file mode 100644 index 0000000..e204ea7 --- /dev/null +++ b/docs/rules/index.md @@ -0,0 +1,52 @@ +# Rules + +- [args][] +- [avoid-implicit][] +- [command-instead-of-module][] +- [command-instead-of-shell][] +- [deprecated-bare-vars][] +- [deprecated-command-syntax][] +- [deprecated-local-action][] +- [deprecated-module][] +- [empty-string-compare][] +- [fqcn][] +- [galaxy][] +- [ignore-errors][] +- [inline-env-var][] +- [internal-error][] +- [jinja][] +- [key-order][] +- [latest][] +- [literal-compare][] +- [load-failure][] +- [loop-var-prefix][] +- [meta-incorrect][] +- [meta-no-info][] +- [meta-no-tags][] +- [meta-runtime][] +- [meta-video-links][] +- [name][] +- [no-changed-when][] +- [no-free-form][] +- [no-handler][] +- [no-jinja-when][] +- [no-log-password][] +- [no-prompting][] +- [no-relative-paths][] +- [no-same-owner][] +- [no-tabs][] +- [only-builtins][] +- [package-latest][] +- [parser-error][] +- [partial-become][] +- [playbook-extension][] +- [risky-file-permissions][] +- [risky-octal][] +- [risky-shell-pipe][] +- [role-name][] +- [run-once][] +- [schema][] +- [syntax-check][] +- [var-naming][] +- [warning][] +- [yaml][] diff --git a/docs/rules/inline-env-var.md b/docs/rules/inline-env-var.md new file mode 120000 index 0000000..8942b5c --- /dev/null +++ b/docs/rules/inline-env-var.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/inline_env_var.md
\ No newline at end of file diff --git a/docs/rules/internal-error.md b/docs/rules/internal-error.md new file mode 120000 index 0000000..8287685 --- /dev/null +++ b/docs/rules/internal-error.md @@ -0,0 +1 @@ +../../src/ansiblelint/_internal/internal_error.md
\ No newline at end of file diff --git a/docs/rules/jinja.md b/docs/rules/jinja.md new file mode 120000 index 0000000..6f5fc6e --- /dev/null +++ b/docs/rules/jinja.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/jinja.md
\ No newline at end of file diff --git a/docs/rules/key-order.md b/docs/rules/key-order.md new file mode 120000 index 0000000..929faa6 --- /dev/null +++ b/docs/rules/key-order.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/key_order.md
\ No newline at end of file diff --git a/docs/rules/latest.md b/docs/rules/latest.md new file mode 120000 index 0000000..6280f1a --- /dev/null +++ b/docs/rules/latest.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/latest.md
\ No newline at end of file diff --git a/docs/rules/literal-compare.md b/docs/rules/literal-compare.md new file mode 120000 index 0000000..4a7ab23 --- /dev/null +++ b/docs/rules/literal-compare.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/literal_compare.md
\ No newline at end of file diff --git a/docs/rules/load-failure.md b/docs/rules/load-failure.md new file mode 120000 index 0000000..38a66a6 --- /dev/null +++ b/docs/rules/load-failure.md @@ -0,0 +1 @@ +../../src/ansiblelint/_internal/load-failure.md
\ No newline at end of file diff --git a/docs/rules/loop-var-prefix.md b/docs/rules/loop-var-prefix.md new file mode 120000 index 0000000..6cebf6a --- /dev/null +++ b/docs/rules/loop-var-prefix.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/loop_var_prefix.md
\ No newline at end of file diff --git a/docs/rules/meta-incorrect.md b/docs/rules/meta-incorrect.md new file mode 120000 index 0000000..34c1891 --- /dev/null +++ b/docs/rules/meta-incorrect.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/meta_incorrect.md
\ No newline at end of file diff --git a/docs/rules/meta-no-info.md b/docs/rules/meta-no-info.md new file mode 120000 index 0000000..1e93715 --- /dev/null +++ b/docs/rules/meta-no-info.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/meta_no_info.md
\ No newline at end of file diff --git a/docs/rules/meta-no-tags.md b/docs/rules/meta-no-tags.md new file mode 120000 index 0000000..b103c57 --- /dev/null +++ b/docs/rules/meta-no-tags.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/meta_no_tags.md
\ No newline at end of file diff --git a/docs/rules/meta-runtime.md b/docs/rules/meta-runtime.md new file mode 120000 index 0000000..82db95a --- /dev/null +++ b/docs/rules/meta-runtime.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/meta_runtime.md
\ No newline at end of file diff --git a/docs/rules/meta-video-links.md b/docs/rules/meta-video-links.md new file mode 120000 index 0000000..176559d --- /dev/null +++ b/docs/rules/meta-video-links.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/meta_video_links.md
\ No newline at end of file diff --git a/docs/rules/name.md b/docs/rules/name.md new file mode 120000 index 0000000..213f924 --- /dev/null +++ b/docs/rules/name.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/name.md
\ No newline at end of file diff --git a/docs/rules/no-changed-when.md b/docs/rules/no-changed-when.md new file mode 120000 index 0000000..e821c77 --- /dev/null +++ b/docs/rules/no-changed-when.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/no_changed_when.md
\ No newline at end of file diff --git a/docs/rules/no-free-form.md b/docs/rules/no-free-form.md new file mode 120000 index 0000000..ee80943 --- /dev/null +++ b/docs/rules/no-free-form.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/no_free_form.md
\ No newline at end of file diff --git a/docs/rules/no-handler.md b/docs/rules/no-handler.md new file mode 120000 index 0000000..1b690c6 --- /dev/null +++ b/docs/rules/no-handler.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/no_handler.md
\ No newline at end of file diff --git a/docs/rules/no-jinja-when.md b/docs/rules/no-jinja-when.md new file mode 120000 index 0000000..d2f9453 --- /dev/null +++ b/docs/rules/no-jinja-when.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/no_jinja_when.md
\ No newline at end of file diff --git a/docs/rules/no-log-password.md b/docs/rules/no-log-password.md new file mode 120000 index 0000000..ac5e869 --- /dev/null +++ b/docs/rules/no-log-password.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/no_log_password.md
\ No newline at end of file diff --git a/docs/rules/no-prompting.md b/docs/rules/no-prompting.md new file mode 120000 index 0000000..49b3d4b --- /dev/null +++ b/docs/rules/no-prompting.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/no_prompting.md
\ No newline at end of file diff --git a/docs/rules/no-relative-paths.md b/docs/rules/no-relative-paths.md new file mode 120000 index 0000000..6fc7d42 --- /dev/null +++ b/docs/rules/no-relative-paths.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/no_relative_paths.md
\ No newline at end of file diff --git a/docs/rules/no-same-owner.md b/docs/rules/no-same-owner.md new file mode 120000 index 0000000..6367a7a --- /dev/null +++ b/docs/rules/no-same-owner.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/no_same_owner.md
\ No newline at end of file diff --git a/docs/rules/no-tabs.md b/docs/rules/no-tabs.md new file mode 120000 index 0000000..d938e62 --- /dev/null +++ b/docs/rules/no-tabs.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/no_tabs.md
\ No newline at end of file diff --git a/docs/rules/only-builtins.md b/docs/rules/only-builtins.md new file mode 120000 index 0000000..7fa5e7a --- /dev/null +++ b/docs/rules/only-builtins.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/only_builtins.md
\ No newline at end of file diff --git a/docs/rules/package-latest.md b/docs/rules/package-latest.md new file mode 120000 index 0000000..18d6723 --- /dev/null +++ b/docs/rules/package-latest.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/package_latest.md
\ No newline at end of file diff --git a/docs/rules/parser-error.md b/docs/rules/parser-error.md new file mode 120000 index 0000000..954422f --- /dev/null +++ b/docs/rules/parser-error.md @@ -0,0 +1 @@ +../../src/ansiblelint/_internal/parser-error.md
\ No newline at end of file diff --git a/docs/rules/partial-become.md b/docs/rules/partial-become.md new file mode 120000 index 0000000..f57aa23 --- /dev/null +++ b/docs/rules/partial-become.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/partial_become.md
\ No newline at end of file diff --git a/docs/rules/playbook-extension.md b/docs/rules/playbook-extension.md new file mode 120000 index 0000000..429aa2f --- /dev/null +++ b/docs/rules/playbook-extension.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/playbook_extension.md
\ No newline at end of file diff --git a/docs/rules/risky-file-permissions.md b/docs/rules/risky-file-permissions.md new file mode 120000 index 0000000..a6718bf --- /dev/null +++ b/docs/rules/risky-file-permissions.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/risky_file_permissions.md
\ No newline at end of file diff --git a/docs/rules/risky-octal.md b/docs/rules/risky-octal.md new file mode 120000 index 0000000..dffaad5 --- /dev/null +++ b/docs/rules/risky-octal.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/risky_octal.md
\ No newline at end of file diff --git a/docs/rules/risky-shell-pipe.md b/docs/rules/risky-shell-pipe.md new file mode 120000 index 0000000..444efa2 --- /dev/null +++ b/docs/rules/risky-shell-pipe.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/risky_shell_pipe.md
\ No newline at end of file diff --git a/docs/rules/role-name.md b/docs/rules/role-name.md new file mode 120000 index 0000000..e050ccd --- /dev/null +++ b/docs/rules/role-name.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/role_name.md
\ No newline at end of file diff --git a/docs/rules/run-once.md b/docs/rules/run-once.md new file mode 120000 index 0000000..b238e25 --- /dev/null +++ b/docs/rules/run-once.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/run_once.md
\ No newline at end of file diff --git a/docs/rules/schema.md b/docs/rules/schema.md new file mode 120000 index 0000000..a278289 --- /dev/null +++ b/docs/rules/schema.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/schema.md
\ No newline at end of file diff --git a/docs/rules/syntax-check.md b/docs/rules/syntax-check.md new file mode 120000 index 0000000..0e5e8f1 --- /dev/null +++ b/docs/rules/syntax-check.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/syntax_check.md
\ No newline at end of file diff --git a/docs/rules/var-naming.md b/docs/rules/var-naming.md new file mode 120000 index 0000000..50bc37b --- /dev/null +++ b/docs/rules/var-naming.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/var_naming.md
\ No newline at end of file diff --git a/docs/rules/warning.md b/docs/rules/warning.md new file mode 120000 index 0000000..9944a53 --- /dev/null +++ b/docs/rules/warning.md @@ -0,0 +1 @@ +../../src/ansiblelint/_internal/warning.md
\ No newline at end of file diff --git a/docs/rules/yaml.md b/docs/rules/yaml.md new file mode 120000 index 0000000..b033a46 --- /dev/null +++ b/docs/rules/yaml.md @@ -0,0 +1 @@ +../../src/ansiblelint/rules/yaml.md
\ No newline at end of file diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css new file mode 100644 index 0000000..9b14d83 --- /dev/null +++ b/docs/stylesheets/extra.css @@ -0,0 +1,174 @@ +/* +Inspired by https://spec.draculatheme.com/ specification +*/ +:root { + --ansi-red: #ff5555; + --ansi-green: #50fa7b; + --ansi-blue: #265285; + --ansi-yellow: #ffb86c; /* Orange */ + --ansi-magenta: #bd93f9; /* Purple */ + --ansi-cyan: #8be9fd; + --ansi-black: #282a36; + --ansi-white: #f8f8f2; +} + +.-Color-Green, +.-Color-Faint-Green, +.-Color-Bold-Green { + color: var(--ansi-green); +} +.-Color-Red, +.-Color-Faint-Red, +.-Color-Bold-Red { + color: var(--ansi-red); +} +.-Color-Yellow, +.-Color-Faint-Yellow, +.-Color-Bold-Yellow { + color: var(--ansi-yellow); +} +.-Color-Blue, +.-Color-Faint-Blue, +.-Color-Bold-Blue { + color: var(--ansi-blue); +} +.-Color-Magenta, +.-Color-Faint-Magenta, +.-Color-Bold-Magenta { + color: var(--ansi-magenta); +} +.-Color-Cyan, +.-Color-Faint-Cyan, +.-Color-Bold-Cyan { + color: var(--ansi-cyan); +} +.-Color-White, +.-Color-Faint-White, +.-Color-Bold-White { + color: var(--ansi-white); +} +.-Color-Black, +.-Color-Faint-Black, +.-Color-Bold-Black { + color: var(--ansi-black); +} + +.-Color-Faint { + opacity: 0.5; +} + +.-Color-Bold { + font-weight: bold; +} + +.-Color-Black-BGBlack, +.-Color-BGBlack, +.-Color-Red-BGBlack, +.-Color-Green-BGBlack, +.-Color-Yellow-BGBlack, +.-Color-Blue-BGBlack, +.-Color-Magenta-BGBlack, +.-Color-Cyan-BGBlack, +.-Color-White-BGBlack { + background-color: var(--ansi-black); +} + +.-Color-Black-BGRed, +.-Color-BGRed, +.-Color-Red-BGRed, +.-Color-Green-BGRed, +.-Color-Yellow-BGRed, +.-Color-Blue-BGRed, +.-Color-Magenta-BGRed, +.-Color-Cyan-BGRed, +.-Color-White-BGRed { + background-color: var(--ansi-red); +} + +.-Color-Black-BGGreen, +.-Color-BGGreen, +.-Color-Red-BGGreen, +.-Color-Green-BGGreen, +.-Color-Yellow-BGGreen, +.-Color-Blue-BGGreen, +.-Color-Magenta-BGGreen, +.-Color-Cyan-BGGreen, +.-Color-White-BGGreen { + background-color: var(--ansi-green); +} + +.-Color-Black-BGYellow, +.-Color-BGYellow, +.-Color-Red-BGYellow, +.-Color-Green-BGYellow, +.-Color-Yellow-BGYellow, +.-Color-Blue-BGYellow, +.-Color-Magenta-BGYellow, +.-Color-Cyan-BGYellow, +.-Color-White-BGYellow { + background-color: var(--ansi-yellow); +} + +.-Color-Black-BGBlue, +.-Color-BGBlue, +.-Color-Red-BGBlue, +.-Color-Green-BGBlue, +.-Color-Yellow-BGBlue, +.-Color-Blue-BGBlue, +.-Color-Magenta-BGBlue, +.-Color-Cyan-BGBlue, +.-Color-White-BGBlue { + background-color: var(--ansi-blue); +} + +.-Color-Black-BGMagenta, +.-Color-BGMagenta, +.-Color-Red-BGMagenta, +.-Color-Green-BGMagenta, +.-Color-Yellow-BGMagenta, +.-Color-Blue-BGMagenta, +.-Color-Magenta-BGMagenta, +.-Color-Cyan-BGMagenta, +.-Color-White-BGMagenta { + background-color: var(--ansi-magenta); +} + +.-Color-Black-BGCyan, +.-Color-BGCyan, +.-Color-Red-BGCyan, +.-Color-Green-BGCyan, +.-Color-Yellow-BGCyan, +.-Color-Blue-BGCyan, +.-Color-Magenta-BGCyan, +.-Color-Cyan-BGCyan, +.-Color-White-BGCyan { + background-color: var(--ansi-cyan); +} + +.-Color-Black-BGWhite, +.-Color-BGWhite, +.-Color-Red-BGWhite, +.-Color-Green-BGWhite, +.-Color-Yellow-BGWhite, +.-Color-Blue-BGWhite, +.-Color-Magenta-BGWhite, +.-Color-Cyan-BGWhite, +.-Color-White-BGWhite { + background-color: var(--ansi-white); +} + +.-Color-Black-BGBlack, +.-Color-Red-BGRed, +.-Color-Blue-BGBlue { + text-shadow: 0 0 1px var(--ansi-white); +} + +.-Color-Green-BGGreen, +.-Color-Yellow-BGYellow, +.-Color-Cyan-BGCyan, +.-Color-White-BGWhite, 
+.-Color-Magenta-BGMagenta,
+.-Color-Cyan-BGGreen,
+.-Color-Green-BGCyan {
+  text-shadow: 0 0 1px var(--ansi-black);
+}
diff --git a/docs/usage.md b/docs/usage.md new file mode 100644 index 0000000..bca2769 --- /dev/null +++ b/docs/usage.md @@ -0,0 +1,255 @@
+# Using
+
+## Using commands
+
+After you install Ansible-lint, run `ansible-lint --help` to display available
+commands and their options.
+
+```bash exec="1" source="console"
+ansible-lint --help
+```
+
+### Command output
+
+Ansible-lint prints output on both `stdout` and `stderr`.
+
+- `stdout` displays rule violations.
+- `stderr` displays logging and free-form messages like statistics.
+
+Most `ansible-lint` examples use the pep8 output format (`-p`), which is
+machine parseable.
+
+Ansible-lint also prints errors using the GitHub Actions [annotation] format
+when it detects the `GITHUB_ACTIONS=true` and `GITHUB_WORKFLOW=...` variables.
+
+[annotation]:
+  https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-error-message
+
+## Caching
+
+For optimal performance, Ansible-lint creates caches with installed or mocked
+roles, collections, and modules in the `{project_dir}/.cache` folder. The
+location of `{project_dir}` can be passed as a command line argument; otherwise
+it is determined by the location of the configuration file, the git project
+top-level directory, or the user home directory as a fallback.
+
+To perform faster re-runs, Ansible-lint does not automatically clean the cache.
+If required, you can do this manually by deleting the `.cache` folder.
+Ansible-lint creates a new cache on the next invocation.
+
+You should add the `.cache` folder to the `.gitignore` file in your git
+repositories.
+
+## Using progressive mode (deprecated)
+
+!!! warning
+
+    This feature is deprecated and will be removed in the next major release.
+    We encourage you to use [ignore file](configuring.md#ignoring-rules-for-entire-files)
+    instead.
+
+For easier adoption, Ansible-lint can alert only for rule violations that have
+occurred since the last commit. This allows new code to be merged without any
+rule violations while allowing content developers to address historical
+violations at a different pace.
+
+The `--progressive` option runs Ansible-lint twice if rule violations exist in
+your content. The second run is performed against a temporary git working copy
+that contains the last commit. Rule violations that exist in the last commit are
+ignored, and Ansible-lint displays only the violations that exist in the new
+commit.
+
+## Linting playbooks and roles
+
+Ansible-lint recommends following the
+{ref}`collection structure layout <collection_structure>` whether you plan to
+build a collection or not.
+
+Following that layout ensures the best integration with all ecosystem tools
+because it helps those tools better distinguish between random YAML files and
+files managed by Ansible. When you call `ansible-lint` without arguments, it
+uses internal heuristics to determine file types.
+
+You can pass the list of **roles** or **playbooks** that you want to lint as
+command line arguments. For example, to lint `examples/playbooks/play.yml` and
+`examples/roles/bobbins`, use the following command:
+
+```bash exec="1" source="console"
+ansible-lint --offline -p examples/playbooks/play.yml examples/roles/bobbins || true
+```
+
+## Running example playbooks
+
+Ansible-lint includes an `ansible-lint/examples` folder that contains example
+playbooks with different rule violations and undesirable characteristics. 
You
+can run `ansible-lint` on the example playbooks to observe Ansible-lint in
+action, as follows:
+
+```bash exec="1" source="console"
+ansible-lint --offline -p examples/playbooks/example.yml >/dev/null || true
+```
+
+Ansible-lint also handles playbooks that include other playbooks, tasks,
+handlers, or roles, as the `examples/playbooks/include.yml` example
+demonstrates.
+
+```bash exec="1" source="console"
+ansible-lint --offline -p examples/playbooks/include.yml 2>/dev/null || true
+```
+
+You can generate `JSON` reports based on the code-climate specification, as the
+`examples/playbooks/norole.yml` example demonstrates.
+
+```bash exec="1" source="tabbed-left" result="json"
+ansible-lint --offline -f json examples/playbooks/norole.yml 2>/dev/null || true
+```
+
+## Specifying rules at runtime
+
+By default, `ansible-lint` applies rules found in
+`ansible-lint/src/ansiblelint/rules`. Use the `-r /path/to/custom-rules` option
+to specify the directory path to a set of custom rules. For multiple custom rule
+sets, pass each set with a separate `-r` option.
+
+You can also combine the default rules with custom rules with the `-R` option
+along with one or more `-r` options.
+
+### Including rules with tags
+
+Each rule has an associated set of one or more tags. Use the `-T` option to view
+the list of tags for each available rule.
+
+You can then use the `-t` option to specify a tag and include the associated
+rules in the lint run. For example, the following `ansible-lint` command applies
+only the rules associated with the _idempotency_ tag:
+
+```bash exec="1" source="console"
+ansible-lint -t idempotency playbook.yml
+```
+
+The following shows the available tags in an example set of rules and the rules
+associated with each tag:
+
+```bash exec="1" source="console"
+ansible-lint -T 2>/dev/null
+```
+
+### Excluding rules with tags
+
+To exclude rules by identifiers or tags, use the `-x SKIP_LIST` option. For
+example, the following command applies all rules except those with the
+_formatting_ and _metadata_ tags:
+
+```bash
+$ ansible-lint -x formatting,metadata playbook.yml
+```
+
+### Ignoring rules
+
+To only warn about rules, use the `-w WARN_LIST` option. For example, the
+following command only warns about violations of rules associated
+with the `experimental` tag:
+
+```console
+$ ansible-lint -w experimental playbook.yml
+```
+
+By default, the `WARN_LIST` includes the `['experimental']` tag. If you define a
+custom `WARN_LIST`, you must add `'experimental'` so that Ansible-lint does not
+fail against experimental rules.
+
+## Muting warnings to avoid false positives
+
+Not all linting rules are precise; some are general rules of thumb. Advanced
+_git_, _yum_ or _apt_ usage, for example, can be difficult to achieve in a
+playbook. In cases like this, Ansible-lint can incorrectly trigger rule
+violations.
+
+To disable rule violations for specific tasks and mute false positives, add
+`# noqa: [rule_id]` to the end of the line. It is best practice to add a comment
+that explains why rules are disabled.
+
+You can add the `# noqa: [rule_id]` comment to the end of any line in a task.
+You can also skip multiple rules with a space-separated list.
+
+```yaml
+- name: This task would typically fire git-latest and partial-become rules
+  become_user: alice # noqa: git-latest partial-become
+  ansible.builtin.git: src=/path/to/git/repo dest=checkout
+```
+
+If the rule is line-based, `# noqa: [rule_id]` must be at the end of the line. 
+ +```yaml +- name: This would typically fire jinja[spacing] + get_url: + url: http://example.com/file.conf + dest: "{{dest_proj_path}}/foo.conf" # noqa: jinja[spacing] +``` + +If you want Ansible-lint to skip a rule entirely, use the `-x` command line +argument or add it to `skip_list` in your configuration. + +The least preferred method of skipping rules is to skip all task-based rules for +a task, which does not skip line-based rules. You can use the +`skip_ansible_lint` tag with all tasks or the `warn` parameter with the +_command_ or _shell_ modules, for example: + +```yaml +- name: This would typically fire deprecated-command-syntax + command: warn=no chmod 644 X + +- name: This would typically fire command-instead-of-module + command: git pull --rebase + args: + warn: false + +- name: This would typically fire git-latest + git: src=/path/to/git/repo dest=checkout + tags: + - skip_ansible_lint +``` + +## Applying profiles + +Ansible-lint profiles allow content creators to progressively improve the +quality of Ansible playbooks, roles, and collections. + +During early development cycles, you need Ansible-lint rules to be less strict. +Starting with the minimal profile ensures that Ansible can load your content. As +you move to the next stage of developing content, you can gradually apply +profiles to avoid common pitfalls and brittle complexity. Then, when you are +ready to publish or share your content, you can use the `shared` and +`production` profiles with much stricter rules. These profiles harden security, +guarantee reliability, and ensure your Ansible content is easy for others to +contribute to and use. + +!!! note + + Tags such as `opt-in` and `experimental` do not take effect for rules that are included in profiles, directly or indirectly. + If a rule is in a profile, Ansible-lint applies that rule to the content. + +After you install and configure `ansible-lint`, you can apply profiles as +follows: + +1. View available profiles with the `--list-profiles` flag. + + ```bash + ansible-lint --list-profiles + ``` + +2. Specify a profile with the `--profile` parameter to lint your content with + those rules, for example: + +- Enforce standard styles and formatting with the `basic` profile. + + ```bash + ansible-lint --profile=basic + ``` + +- Ensure automation consistency, reliability, and security with the `safety` + profile. + + ```bash + ansible-lint --profile=safety + ``` diff --git a/examples/.config/ansible-lint.yml b/examples/.config/ansible-lint.yml new file mode 120000 index 0000000..3da8d98 --- /dev/null +++ b/examples/.config/ansible-lint.yml @@ -0,0 +1 @@ +../../.ansible-lint
\ No newline at end of file diff --git a/examples/.config/molecule/config.yml b/examples/.config/molecule/config.yml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/.config/molecule/config.yml diff --git a/examples/Taskfile.yml b/examples/Taskfile.yml new file mode 100644 index 0000000..9b0913d --- /dev/null +++ b/examples/Taskfile.yml @@ -0,0 +1,8 @@ +--- +# Sample taskfile, for testing linter ability to identify it +version: "3" +output: group +vars: + HOSTNAME: # <-- this is valid for Taskfiles but not for ansible files + sh: echo ${HOSTNAME:-localhost} +tasks: {} diff --git a/examples/ansible-navigator.yml b/examples/ansible-navigator.yml new file mode 100644 index 0000000..56e89cb --- /dev/null +++ b/examples/ansible-navigator.yml @@ -0,0 +1,3 @@ +--- +# see https://ansible-navigator.readthedocs.io/en/latest/settings/ +ansible-navigator: {} diff --git a/examples/broken/.ansible-lint b/examples/broken/.ansible-lint new file mode 100644 index 0000000..6428f2c --- /dev/null +++ b/examples/broken/.ansible-lint @@ -0,0 +1,2 @@ +# invalid ansible-lint config file +foo: bar # invalid based on schema diff --git a/examples/broken/ansible-navigator.yml b/examples/broken/ansible-navigator.yml new file mode 100644 index 0000000..cb4f4f6 --- /dev/null +++ b/examples/broken/ansible-navigator.yml @@ -0,0 +1,3 @@ +--- +# https://ansible-navigator.readthedocs.io/en/latest/settings/ +ansible: {} # invalid as it is missing 'ansible-navigator' parent key diff --git a/examples/broken/encoding.j2 b/examples/broken/encoding.j2 new file mode 100644 index 0000000..2616fcc --- /dev/null +++ b/examples/broken/encoding.j2 @@ -0,0 +1 @@ +À-Á diff --git a/examples/broken/yaml-with-tabs/invalid-due-tabs.yaml b/examples/broken/yaml-with-tabs/invalid-due-tabs.yaml new file mode 100644 index 0000000..0667f2e --- /dev/null +++ b/examples/broken/yaml-with-tabs/invalid-due-tabs.yaml @@ -0,0 +1,4 @@ +{ + "data": "this is not a valid YAML file as specification forbids tabs", + } +} diff --git a/examples/broken_collection_meta_runtime/meta/runtime.yml b/examples/broken_collection_meta_runtime/meta/runtime.yml new file mode 100644 index 0000000..41f4ead --- /dev/null +++ b/examples/broken_collection_meta_runtime/meta/runtime.yml @@ -0,0 +1,2 @@ +--- +foo: bar # should fail meta-runtime schema validation diff --git a/examples/changelogs/changelog.yaml b/examples/changelogs/changelog.yaml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/changelogs/changelog.yaml diff --git a/examples/collection/CHANGELOG.rst b/examples/collection/CHANGELOG.rst new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/collection/CHANGELOG.rst diff --git a/examples/collection/galaxy.yml b/examples/collection/galaxy.yml new file mode 100644 index 0000000..e509486 --- /dev/null +++ b/examples/collection/galaxy.yml @@ -0,0 +1,17 @@ +--- +name: foo +namespace: bar +version: 0.0.0 # noqa: galaxy[version-incorrect] +authors: + - John +readme: ../README.md +description: "..." 
+dependencies: + other_namespace.collection1: ">=1.0.0" + other_namespace.collection2: ">=2.0.0,<3.0.0" + anderson55.my_collection: "*" # note: "*" selects the highest version available +license: + - GPL # <-- invalid license values based on galaxy schema + - Apache +repository: some-url +tags: [networking, test_tag] diff --git a/examples/ee_broken/execution-environment.yml b/examples/ee_broken/execution-environment.yml new file mode 100644 index 0000000..6b15f66 --- /dev/null +++ b/examples/ee_broken/execution-environment.yml @@ -0,0 +1,3 @@ +--- +# This should fail Execution Environment Schema validation +foo: bar diff --git a/examples/execution-environment.yml b/examples/execution-environment.yml new file mode 100644 index 0000000..9bed441 --- /dev/null +++ b/examples/execution-environment.yml @@ -0,0 +1,4 @@ +--- +version: 1 +dependencies: + galaxy: requirements.yml diff --git a/examples/galaxy_no_required_tags/fail/changelogs/changelog.yaml b/examples/galaxy_no_required_tags/fail/changelogs/changelog.yaml new file mode 100644 index 0000000..52e7f38 --- /dev/null +++ b/examples/galaxy_no_required_tags/fail/changelogs/changelog.yaml @@ -0,0 +1,2 @@ +--- +releases: {} diff --git a/examples/galaxy_no_required_tags/fail/galaxy.yml b/examples/galaxy_no_required_tags/fail/galaxy.yml new file mode 100644 index 0000000..48ee587 --- /dev/null +++ b/examples/galaxy_no_required_tags/fail/galaxy.yml @@ -0,0 +1,11 @@ +--- +namespace: bar +name: foo +version: 1.0.0 # <-- that version is not valid, should be 1.0.0 or greater +authors: + - John +readme: ../README.md +description: "..." +license: + - Apache-2.0 +repository: https://github.com/ansible-collections/community.REPO_NAME diff --git a/examples/galaxy_no_required_tags/pass/changelogs/changelog.yaml b/examples/galaxy_no_required_tags/pass/changelogs/changelog.yaml new file mode 100644 index 0000000..52e7f38 --- /dev/null +++ b/examples/galaxy_no_required_tags/pass/changelogs/changelog.yaml @@ -0,0 +1,2 @@ +--- +releases: {} diff --git a/examples/galaxy_no_required_tags/pass/galaxy.yml b/examples/galaxy_no_required_tags/pass/galaxy.yml new file mode 100644 index 0000000..0f62481 --- /dev/null +++ b/examples/galaxy_no_required_tags/pass/galaxy.yml @@ -0,0 +1,12 @@ +--- +namespace: bar +name: foo +version: 1.0.0 # <-- that version is not valid, should be 1.0.0 or greater +authors: + - John +readme: ../README.md +description: "..." 
+license: + - Apache-2.0 +repository: https://github.com/ansible-collections/community.REPO_NAME +tags: [networking, test_tag] diff --git a/examples/group_vars/all.yml b/examples/group_vars/all.yml new file mode 100644 index 0000000..ef692db --- /dev/null +++ b/examples/group_vars/all.yml @@ -0,0 +1,2 @@ +--- +some_var: some_value_defined_in_group_vars_all diff --git a/examples/host_vars/localhost.yml b/examples/host_vars/localhost.yml new file mode 100644 index 0000000..9c058f5 --- /dev/null +++ b/examples/host_vars/localhost.yml @@ -0,0 +1,2 @@ +--- +some_var: some_value_defined_in_host_vars_localhost diff --git a/examples/inventory/broken_dev_inventory.yml b/examples/inventory/broken_dev_inventory.yml new file mode 100644 index 0000000..0d7a793 --- /dev/null +++ b/examples/inventory/broken_dev_inventory.yml @@ -0,0 +1,3 @@ +--- +all: + foo: {} # invalid based on inventory json schema diff --git a/examples/inventory/inventory.yml b/examples/inventory/inventory.yml new file mode 100644 index 0000000..8752d9b --- /dev/null +++ b/examples/inventory/inventory.yml @@ -0,0 +1,31 @@ +--- +# https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html +ungrouped: {} +all: + hosts: + mail.example.com: + children: + webservers: + hosts: + foo.example.com: + bar.example.com: + dbservers: + hosts: + one.example.com: + two.example.com: + three.example.com: + east: + hosts: + foo.example.com: + one.example.com: + two.example.com: + west: + hosts: + bar.example.com: + three.example.com: + prod: + children: + east: {} + test: + children: + west: {} diff --git a/examples/inventory/production.yml b/examples/inventory/production.yml new file mode 100644 index 0000000..a8a7deb --- /dev/null +++ b/examples/inventory/production.yml @@ -0,0 +1,38 @@ +--- +all: + hosts: + mail.example.com: + children: + webservers: + hosts: + foo.example.com: + bar.example.com: + # ranges are supported: + www[01:50].example.com: + www[01:50:2].example.com: + # these are variables: + var_1: value_1 + another_var: 200 + dbservers: + hosts: + one.example.com: + two.example.com: + three.example.com: + east: + hosts: + foo.example.com: + one.example.com: + two.example.com: + west: + hosts: + bar.example.com: + three.example.com: + prod: + children: + east: + test: + children: + west: + # add variables for all hosts + vars: + my_var: 123 diff --git a/examples/lineno.yml b/examples/lineno.yml new file mode 100644 index 0000000..a741d91 --- /dev/null +++ b/examples/lineno.yml @@ -0,0 +1,3 @@ +--- +- tasks: + - git: repo=hello diff --git a/examples/meta/changelogs/changelog.yaml b/examples/meta/changelogs/changelog.yaml new file mode 100644 index 0000000..52e7f38 --- /dev/null +++ b/examples/meta/changelogs/changelog.yaml @@ -0,0 +1,2 @@ +--- +releases: {} diff --git a/examples/meta/galaxy.yml b/examples/meta/galaxy.yml new file mode 100644 index 0000000..b981f5c --- /dev/null +++ b/examples/meta/galaxy.yml @@ -0,0 +1,15 @@ +--- +name: foo +namespace: bar +version: 0.2.3 # <-- that version is not valid, should be 1.0.0 or greater +authors: + - John +readme: ../README.md +description: "..." 
+dependencies: + other_namespace.collection1: ">=1.0.0" + other_namespace.collection2: ">=2.0.0,<3.0.0" + anderson55.my_collection: "*" # note: "*" selects the highest version available +license: + - Apache-2.0 +tags: [networking, test_tag] diff --git a/examples/meta/runtime.yml b/examples/meta/runtime.yml new file mode 100644 index 0000000..895980b --- /dev/null +++ b/examples/meta/runtime.yml @@ -0,0 +1,3 @@ +--- +# https://docs.ansible.com/ansible/latest/dev_guide/developing_collections_structure.html +requires_ansible: ">=2.10,<2.20" diff --git a/examples/meta_runtime_version_checks/fail_0/meta/runtime.yml b/examples/meta_runtime_version_checks/fail_0/meta/runtime.yml new file mode 100644 index 0000000..5ff8a8c --- /dev/null +++ b/examples/meta_runtime_version_checks/fail_0/meta/runtime.yml @@ -0,0 +1,2 @@ +--- +requires_ansible: ">=2.20.0" diff --git a/examples/meta_runtime_version_checks/fail_1/meta/runtime.yml b/examples/meta_runtime_version_checks/fail_1/meta/runtime.yml new file mode 100644 index 0000000..eb5ade0 --- /dev/null +++ b/examples/meta_runtime_version_checks/fail_1/meta/runtime.yml @@ -0,0 +1,2 @@ +--- +requires_ansible: ">=2.9.0,<2.20" diff --git a/examples/meta_runtime_version_checks/fail_2/meta/runtime.yml b/examples/meta_runtime_version_checks/fail_2/meta/runtime.yml new file mode 100644 index 0000000..92db835 --- /dev/null +++ b/examples/meta_runtime_version_checks/fail_2/meta/runtime.yml @@ -0,0 +1,2 @@ +--- +requires_ansible: "2.13.0,<2.15" diff --git a/examples/meta_runtime_version_checks/pass/meta/runtime.yml b/examples/meta_runtime_version_checks/pass/meta/runtime.yml new file mode 100644 index 0000000..2b69758 --- /dev/null +++ b/examples/meta_runtime_version_checks/pass/meta/runtime.yml @@ -0,0 +1,2 @@ +--- +requires_ansible: ">=2.13.0,<2.15" diff --git a/examples/no_changelog/galaxy.yml b/examples/no_changelog/galaxy.yml new file mode 100644 index 0000000..2c35693 --- /dev/null +++ b/examples/no_changelog/galaxy.yml @@ -0,0 +1,17 @@ +--- +name: foo +namespace: bar +version: 0.0.0 # noqa: galaxy[version-incorrect] +authors: + - John +readme: ../README.md +description: "..." 
+dependencies: + other_namespace.collection1: ">=1.0.0" + other_namespace.collection2: ">=2.0.0,<3.0.0" + anderson55.my_collection: "*" # note: "*" selects the highest version available +license: + - GPL-3.0-or-later + - Apache-2.0 +repository: some-url +tags: [networking, test_tag] diff --git a/examples/no_collection_version/changelogs/changelog.yaml b/examples/no_collection_version/changelogs/changelog.yaml new file mode 100644 index 0000000..52e7f38 --- /dev/null +++ b/examples/no_collection_version/changelogs/changelog.yaml @@ -0,0 +1,2 @@ +--- +releases: {} diff --git a/examples/no_collection_version/galaxy.yml b/examples/no_collection_version/galaxy.yml new file mode 100644 index 0000000..95d9d18 --- /dev/null +++ b/examples/no_collection_version/galaxy.yml @@ -0,0 +1,14 @@ +--- +name: galaxy_no_version +namespace: test +readme: ../README.md +authors: + - John +description: your collection description +license: + - GPL + - Apache + +dependencies: {} +repository: http://example.com/repository +tags: [networking, test_tag] diff --git a/examples/other/guess-1.yml b/examples/other/guess-1.yml new file mode 100644 index 0000000..b11fb9c --- /dev/null +++ b/examples/other/guess-1.yml @@ -0,0 +1,3 @@ +--- +- name: Minimal yaml to determine it as a playbook + hosts: localhost diff --git a/examples/other/some.j2.yaml b/examples/other/some.j2.yaml new file mode 100644 index 0000000..5d34f55 --- /dev/null +++ b/examples/other/some.j2.yaml @@ -0,0 +1,2 @@ +# Used to validate that a templated YAML file does not confuse the linter +{% include 'port.j2' %} diff --git a/examples/other/some.yaml-too b/examples/other/some.yaml-too new file mode 100644 index 0000000..50a2ada --- /dev/null +++ b/examples/other/some.yaml-too @@ -0,0 +1 @@ +# Used to test custom kinds defined in .ansible-config diff --git a/examples/playbooks/.ansible-lint-only-builtins-allow b/examples/playbooks/.ansible-lint-only-builtins-allow new file mode 100644 index 0000000..ddcbe6c --- /dev/null +++ b/examples/playbooks/.ansible-lint-only-builtins-allow @@ -0,0 +1,13 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/ansible-lint-config.json +# Mock modules or roles in order to pass ansible-playbook --syntax-check +mock_modules: + - fake_namespace.fake_collection.fake_module + +only_builtins_allow_collections: + - fake_namespace.fake_collection +only_builtins_allow_modules: + - zuul_return + +# skip rule to test builtin rule +skip_list: + - args[module] diff --git a/examples/playbooks/README.md b/examples/playbooks/README.md new file mode 100644 index 0000000..48965c2 --- /dev/null +++ b/examples/playbooks/README.md @@ -0,0 +1,2 @@ +The `./roles` symlink helps Ansible find local roles used by files from +current directory. 
diff --git a/examples/playbooks/always-run-success.yml b/examples/playbooks/always-run-success.yml new file mode 100644 index 0000000..43d1fea --- /dev/null +++ b/examples/playbooks/always-run-success.yml @@ -0,0 +1,2 @@ +--- +- hosts: localhost diff --git a/examples/playbooks/become.transformed.yml b/examples/playbooks/become.transformed.yml new file mode 100644 index 0000000..ce53f56 --- /dev/null +++ b/examples/playbooks/become.transformed.yml @@ -0,0 +1,15 @@ +--- +- name: Fixture + hosts: all + tasks: + - name: Clone content repository + ansible.builtin.git: + repo: "{{ archive_services_repo_url }}" + dest: /home/www + accept_hostkey: true + version: master + update: false + become: true + become_user: nobody + notify: + - restart apache2 diff --git a/examples/playbooks/become.yml b/examples/playbooks/become.yml new file mode 100644 index 0000000..ce53f56 --- /dev/null +++ b/examples/playbooks/become.yml @@ -0,0 +1,15 @@ +--- +- name: Fixture + hosts: all + tasks: + - name: Clone content repository + ansible.builtin.git: + repo: "{{ archive_services_repo_url }}" + dest: /home/www + accept_hostkey: true + version: master + update: false + become: true + become_user: nobody + notify: + - restart apache2 diff --git a/examples/playbooks/block.yml b/examples/playbooks/block.yml new file mode 100644 index 0000000..6194cc6 --- /dev/null +++ b/examples/playbooks/block.yml @@ -0,0 +1,26 @@ +--- +- hosts: all + + pre_tasks: + - { import_tasks: does-not-exist.yml } + + tasks: + - block: + - name: Successful debug message + debug: msg='i execute normally' + - name: Failure command + ansible.builtin.command: /bin/false + changed_when: false + - name: Never reached debug message + debug: msg='i never execute, cause ERROR!' + rescue: + - name: Exception debug message + ansible.builtin.debug: msg='I caught an error' + - name: Another failure command + ansible.builtin.command: /bin/false + changed_when: false + - name: Another missed debug message + debug: msg='I also never execute :-(' + always: + - name: Always reached debug message + ansible.builtin.debug: msg="this always executes" diff --git a/examples/playbooks/blockincludes.yml b/examples/playbooks/blockincludes.yml new file mode 100644 index 0000000..b8387a8 --- /dev/null +++ b/examples/playbooks/blockincludes.yml @@ -0,0 +1,21 @@ +--- +- name: Fixture + hosts: localhost + vars: + varset: varset + tasks: + - name: Block level 1 + block: + - name: Include under block level 1 # noqa: fqcn + include_tasks: tasks/nestedincludes.yml + - name: Block level 2 + block: + # - include_tasks: "{{ varnotset }}.yml" + - name: Block level 3 + block: + - name: Include under block level 3 # noqa: deprecated-module + ansible.builtin.include: "{{ varset }}.yml" + - name: Block level 4 + block: + - name: INCLUDE under block level 4 + ansible.builtin.include_tasks: tasks/directory with spaces/main.yml diff --git a/examples/playbooks/blockincludes2.yml b/examples/playbooks/blockincludes2.yml new file mode 100644 index 0000000..f11667b --- /dev/null +++ b/examples/playbooks/blockincludes2.yml @@ -0,0 +1,18 @@ +--- +- name: Fixture + hosts: webservers + vars: + varset: varset + tasks: + - name: I am a block + block: + - name: Include under block + ansible.builtin.include_tasks: tasks/nestedincludes.yml + # - block: + # - include_tasks: "{{ varnotset }}.yml" + rescue: + - name: Include under rescue + ansible.builtin.include_tasks: "{{ varset }}.yml" + always: + - name: Include under always + ansible.builtin.include_tasks: tasks/directory with spaces/main.yml diff 
--git a/examples/playbooks/bracketsmatchtest.yml b/examples/playbooks/bracketsmatchtest.yml new file mode 100644 index 0000000..3b335ff --- /dev/null +++ b/examples/playbooks/bracketsmatchtest.yml @@ -0,0 +1,4 @@ +--- +val1: "{{dest}}" +val2: worry +val3: "{{victory}}" diff --git a/examples/playbooks/collections/.gitignore b/examples/playbooks/collections/.gitignore new file mode 100644 index 0000000..3cd6e1c --- /dev/null +++ b/examples/playbooks/collections/.gitignore @@ -0,0 +1,2 @@ +ansible_collections +requirements.yml diff --git a/examples/playbooks/command-check-failure.yml b/examples/playbooks/command-check-failure.yml new file mode 100644 index 0000000..f249ddd --- /dev/null +++ b/examples/playbooks/command-check-failure.yml @@ -0,0 +1,13 @@ +--- +- hosts: localhost + tasks: + - name: Command without checks + ansible.builtin.command: echo blah + args: + chdir: X + + - name: Shell without checks + ansible.builtin.shell: echo blah + args: + chdir: X + become_method: xx diff --git a/examples/playbooks/command-check-success.yml b/examples/playbooks/command-check-success.yml new file mode 100644 index 0000000..b855b8a --- /dev/null +++ b/examples/playbooks/command-check-success.yml @@ -0,0 +1,60 @@ +--- +- hosts: localhost + tasks: + - name: Command with creates check + ansible.builtin.command: echo blah + args: + creates: Z + + - name: Command with removes check + ansible.builtin.command: echo blah + args: + removes: Z + + - name: Command with changed_when + ansible.builtin.command: echo blah + changed_when: false + + - name: Command with inline creates + ansible.builtin.command: creates=Z echo blah + + - name: Command with inline removes + ansible.builtin.command: removes=Z echo blah + + - name: Command with cmd + command: + cmd: echo blah + args: + creates: Z + + - name: Use shell with creates check + shell: echo blah + args: + creates: Z + + - name: Use shell with removes check + shell: echo blah + args: + removes: Z + + - name: Use shell with changed_when + shell: echo blah + changed_when: false + + - name: Use shell with inline creates + shell: creates=Z echo blah + + - name: Use shell with inline removes + shell: removes=Z echo blah + + - name: Use shell with cmd + shell: + cmd: echo blah + args: + creates: Z + +- hosts: localhost + handlers: + - name: Restart something + command: do something + - include: handlers/included-handlers.yml diff --git a/examples/playbooks/common-include-1.yml b/examples/playbooks/common-include-1.yml new file mode 100644 index 0000000..3a4691f --- /dev/null +++ b/examples/playbooks/common-include-1.yml @@ -0,0 +1,10 @@ +--- +- name: Fixture for test_files_not_scanned_twice + hosts: localhost + gather_facts: false + tasks: + - name: Some include + ansible.builtin.import_tasks: tasks/included-with-lint.yml + - name: Some include_tasks with file and jinja2 + ansible.builtin.include_tasks: + file: "{{ 'tasks/included-with-lint.yml' }}" diff --git a/examples/playbooks/common-include-2.yml b/examples/playbooks/common-include-2.yml new file mode 100644 index 0000000..b3a942b --- /dev/null +++ b/examples/playbooks/common-include-2.yml @@ -0,0 +1,7 @@ +--- +- name: Fixture + hosts: localhost + gather_facts: false + tasks: + - name: Some include + ansible.builtin.include_tasks: tasks/included-with-lint.yml diff --git a/examples/playbooks/conflicting_action.yml b/examples/playbooks/conflicting_action.yml new file mode 100644 index 0000000..5355323 --- /dev/null +++ b/examples/playbooks/conflicting_action.yml @@ -0,0 +1,12 @@ +--- +- hosts: localhost + tasks: + 
- name: Foo + ansible.builtin.debug: + msg: bar + ansible.builtin.command: echo +# On this file ansible-playbook --syntax-check reports: +# ERROR! conflicting action statements: debug, command +# +# The error appears to be in 'test/conflicting_action.yml': line 3, column 7, but may +# be elsewhere in the file depending on the exact syntax problem. diff --git a/examples/playbooks/contains_secrets.transformed.yml b/examples/playbooks/contains_secrets.transformed.yml new file mode 100644 index 0000000..cf5388c --- /dev/null +++ b/examples/playbooks/contains_secrets.transformed.yml @@ -0,0 +1,18 @@ +--- +- name: Fixture + hosts: localhost + vars: + plain: hello123 + # spell-checker: disable-next-line + # just 'hello123' encrypted with 'letmein' for test purposes + secret: !vault | + $ANSIBLE_VAULT;1.1;AES256 + 63346434613163653866303630313238626164313961613935373137323639636333393338386232 + 3735313061316666343839343665383036623237353263310a623639336530383433343833653138 + 30393032393534316164613834393864616566646164363830316664623636643731383164376163 + 3736653037356435310a303533383533353739323834343637366438633766666163656330343631 + 3066 + tasks: + - name: Just a debug task + ansible.builtin.debug: + msg: hello world diff --git a/examples/playbooks/contains_secrets.yml b/examples/playbooks/contains_secrets.yml new file mode 100644 index 0000000..cf5388c --- /dev/null +++ b/examples/playbooks/contains_secrets.yml @@ -0,0 +1,18 @@ +--- +- name: Fixture + hosts: localhost + vars: + plain: hello123 + # spell-checker: disable-next-line + # just 'hello123' encrypted with 'letmein' for test purposes + secret: !vault | + $ANSIBLE_VAULT;1.1;AES256 + 63346434613163653866303630313238626164313961613935373137323639636333393338386232 + 3735313061316666343839343665383036623237353263310a623639336530383433343833653138 + 30393032393534316164613834393864616566646164363830316664623636643731383164376163 + 3736653037356435310a303533383533353739323834343637366438633766666163656330343631 + 3066 + tasks: + - name: Just a debug task + ansible.builtin.debug: + msg: hello world diff --git a/examples/playbooks/custom_module.yml b/examples/playbooks/custom_module.yml new file mode 100644 index 0000000..05c4a87 --- /dev/null +++ b/examples/playbooks/custom_module.yml @@ -0,0 +1,9 @@ +--- +- name: Fixture + hosts: localhost + gather_facts: false + tags: + - "{{ foo }}" + tasks: + - name: Run custom module # noqa: fqcn[action] + fake_module: {} diff --git a/examples/playbooks/ematcher-rule.yml b/examples/playbooks/ematcher-rule.yml new file mode 100644 index 0000000..7d64127 --- /dev/null +++ b/examples/playbooks/ematcher-rule.yml @@ -0,0 +1,13 @@ +--- +- hosts: localhost + name: BANNED +- hosts: localhost + name: Another BANNED line + tasks: + - name: Foo + debug: + msg: A 3rd BANNED line + + - name: Bar + ansible.builtin.command: echo something + changed_when: false diff --git a/examples/playbooks/empty_playbook.yml b/examples/playbooks/empty_playbook.yml new file mode 100644 index 0000000..271de72 --- /dev/null +++ b/examples/playbooks/empty_playbook.yml @@ -0,0 +1,3 @@ +# an empty playbook which makes ansible-playbook --syntax-check report +# ERROR! 
Empty playbook, nothing to do +# with exit code 4 diff --git a/examples/playbooks/example.yml b/examples/playbooks/example.yml new file mode 100644 index 0000000..fa1a635 --- /dev/null +++ b/examples/playbooks/example.yml @@ -0,0 +1,49 @@ +--- +- name: Fixture for test_example + hosts: webservers + vars: + old_school: 1.2.3 + bracket: and close bracket + + tasks: + - name: Unset variable + action: ansible.builtin.command echo {{this_variable}} is not set in this playbook + + - name: Trailing whitespace + action: ansible.builtin.command echo do nothing + + - name: Run git check + action: ansible.builtin.git a=b c=d + + - name: Run git check 2 + action: ansible.builtin.git version=HEAD c=d + + - name: Run git check 3 + ansible.builtin.git: version=a1b2c3d4 repo=xyz bobbins=d + + - name: Executing git through command + action: ansible.builtin.command git clone blah + + - name: Executing git through command + action: ansible.builtin.command chdir=bobbins creates=whatever /usr/bin/git clone blah + + - name: Using git module + action: ansible.builtin.git repo=blah + + - name: Passing git as an argument to another task + action: ansible.builtin.debug msg="{{item}}" + with_items: + - git # yamllint wrong indentation + - bobbins + + - name: Yum latest + ansible.builtin.yum: state=latest name=httpd + + - ansible.builtin.debug: msg="debug task without a name" + + - name: Run apt latest + ansible.builtin.apt: state=latest name=apache2 + + - ansible.builtin.meta: flush_handlers + # empty task is currently accepted by ansible as valid code but not our schemas: + - diff --git a/examples/playbooks/extra_vars.yml b/examples/playbooks/extra_vars.yml new file mode 100644 index 0000000..d3832f1 --- /dev/null +++ b/examples/playbooks/extra_vars.yml @@ -0,0 +1,9 @@ +--- +- hosts: all + tags: + - baz + - "{{ foo }}" + tasks: + - name: Show `complex_variable` value loaded from `extra_vars` + ansible.builtin.debug: + msg: "{{ complex_variable }}" diff --git a/examples/playbooks/find_children.yml b/examples/playbooks/find_children.yml new file mode 100644 index 0000000..9e7a6df --- /dev/null +++ b/examples/playbooks/find_children.yml @@ -0,0 +1,10 @@ +--- +# Bug reproducer https://github.com/ansible/ansible-lint/issues/2846 +- name: Play + hosts: localhost + gather_facts: false + connection: local + roles: + - role: acme.sample4 + vars: + date_ko_1: "{{ lookup('pipe', 'date') }}" diff --git a/examples/playbooks/handlers/included-handlers.yml b/examples/playbooks/handlers/included-handlers.yml new file mode 100644 index 0000000..77e7440 --- /dev/null +++ b/examples/playbooks/handlers/included-handlers.yml @@ -0,0 +1,6 @@ +--- +- name: Restart xyz + service: name=xyz state=restarted +# see Issue #165 +- name: Command handler issue 165 + command: do something diff --git a/examples/playbooks/handlers/y.yml b/examples/playbooks/handlers/y.yml new file mode 100644 index 0000000..cd984f8 --- /dev/null +++ b/examples/playbooks/handlers/y.yml @@ -0,0 +1,3 @@ +--- +- name: Funny handler + action: service name=funny state=started force=true diff --git a/examples/playbooks/include-import-tasks-in-role.yml b/examples/playbooks/include-import-tasks-in-role.yml new file mode 100644 index 0000000..26a658a --- /dev/null +++ b/examples/playbooks/include-import-tasks-in-role.yml @@ -0,0 +1,5 @@ +--- +- name: Fixture + hosts: all + roles: + - role_with_task_inclusions diff --git a/examples/playbooks/include-in-block.yml b/examples/playbooks/include-in-block.yml new file mode 100644 index 0000000..9f3a8fe --- /dev/null +++ 
b/examples/playbooks/include-in-block.yml @@ -0,0 +1,6 @@ +--- +- name: Fixture + hosts: all + tasks: + - name: Include + ansible.builtin.include_tasks: tasks/include-in-block-inner.yml diff --git a/examples/playbooks/include.yml b/examples/playbooks/include.yml new file mode 100644 index 0000000..5596728 --- /dev/null +++ b/examples/playbooks/include.yml @@ -0,0 +1,18 @@ +--- +- hosts: bobbins + + pre_tasks: + - ansible.builtin.include_tasks: tasks/x.yml + + roles: + - hello + - { role: more_complex, t: z } + + tasks: + - ansible.builtin.include_tasks: tasks/x.yml + - ansible.builtin.include_tasks: tasks/x.yml y=z + + handlers: + - ansible.builtin.include_tasks: handlers/y.yml + +- ansible.builtin.include_tasks: play.yml diff --git a/examples/playbooks/jinja-spacing.yml b/examples/playbooks/jinja-spacing.yml new file mode 100644 index 0000000..b6824c2 --- /dev/null +++ b/examples/playbooks/jinja-spacing.yml @@ -0,0 +1,89 @@ +--- +# Should raise jinja[spacing] at tasks line 23, 26, 29, 54, 65 +- name: Fixture for testing jinja2[spacing] + hosts: all + tasks: + - name: Good variable format + ansible.builtin.debug: + msg: "{{ good_format }}" + - name: Good variable format + ansible.builtin.debug: + msg: "Value: {{ good_format }}" + - name: Good variable filter format + ansible.builtin.debug: + msg: "{{ good_format | filter }}" + - name: Good variable filter format + ansible.builtin.debug: + msg: "Value: {{ good_format | filter }}" + - name: Jinja escaping allowed + ansible.builtin.debug: + msg: "{{ '{{' }}" + - name: Jinja escaping allowed + # noqa: risky-shell-pipe + ansible.builtin.shell: docker info --format '{{ '{{' }}json .Swarm.LocalNodeState{{ '}}' }}' | tr -d '"' + changed_when: false + - name: Jinja whitespace control allowed + ansible.builtin.debug: + msg: | + {{ good_format }}/ + {{- good_format }} + {{- good_format -}} + - name: Bad variable format + ansible.builtin.debug: + msg: "{{bad_format}}" # <-- 1 + - name: Bad variable format + ansible.builtin.debug: + msg: "Value: {{ bad_format}}" # <-- 2 + - name: Bad variable format + ansible.builtin.debug: + msg: "{{bad_format }}" # <-- 3 + - name: Bad variable filter format + ansible.builtin.debug: + msg: "{{ bad_format|filter }}" # <-- 4 + - name: Bad variable filter format + ansible.builtin.debug: + msg: "Value: {{ bad_format |filter }}" # <-- 5 + - name: Bad variable filter format + ansible.builtin.debug: + msg: "{{ bad_format| filter }}" # <-- 6 + - name: Not a jinja variable # noqa: jinja[spacing] + ansible.builtin.debug: + # spell-checker: disable-next-line + msg: data = ${lookup{$local_part}lsearch{/etc/aliases}} + - name: JSON inside jinja is valid + ansible.builtin.debug: + msg: "{{ {'test': {'subtest': variable}} }}" + - name: Avoid false positive on multiline + vars: + cases: + case1: >- + http://foo.com/{{ + case1 }} + case2: >- + http://bar.com/{{ + case2 }} + ansible.builtin.debug: + var: cases + + - name: Valid single line nested JSON false positive + ansible.builtin.debug: + msg: "{{ {'dummy_2': {'nested_dummy_1': 'value_1', 'nested_dummy_2': value_2}} | combine(dummy_1) }}" + + - name: Invalid single line nested JSON + ansible.builtin.debug: + msg: "{{ {'dummy_2': {'nested_dummy_1': 'value_1', 'nested_dummy_2': value_2}} | combine(dummy_1)}}" # <-- 7 + + - name: Valid multiline nested JSON false positive + ansible.builtin.debug: + msg: >- + {{ {'dummy_2': {'nested_dummy_1': value_1, + 'nested_dummy_2': value_2}} | + combine(dummy_1) }} + + - name: Invalid multiline nested JSON + ansible.builtin.debug: + # not an 
error currently because current implementation skips multiline expressions + msg: >- + {{ {'dummy_2': {'nested_dummy_1': value_1, + 'nested_dummy_2': value_2}} | + combine(dummy_1)}} diff --git a/examples/playbooks/json-schema-fail.yml b/examples/playbooks/json-schema-fail.yml new file mode 100644 index 0000000..5760632 --- /dev/null +++ b/examples/playbooks/json-schema-fail.yml @@ -0,0 +1,4 @@ +--- +- name: This should raise json-schema error, due to hosts missing the last letter + host: localhost + tasks: [] diff --git a/examples/playbooks/lots_of_warnings.transformed.yml b/examples/playbooks/lots_of_warnings.transformed.yml new file mode 100644 index 0000000..1bca9da --- /dev/null +++ b/examples/playbooks/lots_of_warnings.transformed.yml @@ -0,0 +1,1000 @@ +--- +# This playbook causes ansible-lint to output tons of warnings +# Enough to exceed typical stdout buffering size and thus to show the need for +# catching IOError (EPIPE) errors. + +- name: Fixture + hosts: webservers + tasks: + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: 
ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + 
action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through 
command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git 
through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: 
Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - 
name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone 
blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git 
clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: 
ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + 
action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through 
command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git 
through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah diff --git a/examples/playbooks/lots_of_warnings.yml b/examples/playbooks/lots_of_warnings.yml new file mode 100644 index 0000000..1bca9da --- /dev/null +++ b/examples/playbooks/lots_of_warnings.yml @@ -0,0 +1,1000 @@ +--- +# This playbook causes ansible-lint to output tons of warnings +# Enough to exceed typical stdout buffering size and thus to show the need for +# catching IOError (EPIPE) errors. 
+ +- name: Fixture + hosts: webservers + tasks: + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through 
command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git 
through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: 
Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - 
name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone 
blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git 
clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: 
ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + 
action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through 
command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git 
through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: 
Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - 
name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah + - name: Executing git through command + action: ansible.builtin.command git clone blah diff --git a/examples/playbooks/mocked_dependency.yml b/examples/playbooks/mocked_dependency.yml new file mode 100644 index 0000000..531e0c1 --- /dev/null +++ b/examples/playbooks/mocked_dependency.yml @@ -0,0 +1,10 @@ +--- +- hosts: localhost + roles: + - mocked_role + - fake_namespace.fake_collection.fake_role + tasks: + - name: Some task + zuul_return: {} + - name: Mocked module from collection + fake_namespace.fake_collection.fake_module: {} diff --git a/examples/playbooks/multiline-brackets-do-not-match-test.yml b/examples/playbooks/multiline-brackets-do-not-match-test.yml new file mode 100644 index 0000000..f13315e --- /dev/null +++ b/examples/playbooks/multiline-brackets-do-not-match-test.yml @@ -0,0 +1,22 @@ +--- +- hosts: foo + roles: + - ../../../roles/base_os + - ../../../roles/repos + - { + role: ../../../roles/openshift_master, + oo_minion_ips: "{ hostvars['localhost'].oo_minion_ips | default(['']) }}", + oo_bind_ip: "{{ hostvars[inventory_hostname].ansible_eth0.ipv4.address | default(['']) }}" + } + - ../../../roles/pods + +- name: Set Origin specific facts on localhost (for later use) + hosts: localhost + gather_facts: false + tasks: + - name: Setting oo_minion_ips fact on localhost + ansible.builtin.set_fact: + oo_minion_ips: "{{ hostvars + | oo_select_keys(groups['tag_env-host-type-' + oo_env + '-openshift-minion']) + | oo_collect(attribute='ansible_eth0.ipv4.address') }" + when: groups['tag_env-host-type-' + oo_env + '-openshift-minion'] is defined diff --git a/examples/playbooks/multiline-bracketsmatchtest.yml b/examples/playbooks/multiline-bracketsmatchtest.yml new file mode 100644 index 0000000..86a19f7 --- /dev/null +++ b/examples/playbooks/multiline-bracketsmatchtest.yml @@ -0,0 +1,22 @@ +--- +- hosts: foo + roles: + - ../../../roles/base_os + - ../../../roles/repos + - { + role: ../../../roles/openshift_master, + oo_minion_ips: "{{ hostvars['localhost'].oo_minion_ips | default(['']) }}", + 
oo_bind_ip: "{{ hostvars[inventory_hostname].ansible_eth0.ipv4.address | default(['']) }}" + } + - ../../../roles/pods + +- name: Set Origin specific facts on localhost (for later use) + hosts: localhost + gather_facts: false + tasks: + - name: Setting oo_minion_ips fact on localhost + ansible.builtin.set_fact: + oo_minion_ips: "{{ hostvars + | oo_select_keys(groups['tag_env-host-type-' + oo_env + '-openshift-minion']) + | oo_collect(attribute='ansible_eth0.ipv4.address') }}" + when: groups['tag_env-host-type-' + oo_env + '-openshift-minion'] is defined diff --git a/examples/playbooks/nomatches.yml b/examples/playbooks/nomatches.yml new file mode 100644 index 0000000..a3fac5d --- /dev/null +++ b/examples/playbooks/nomatches.yml @@ -0,0 +1,9 @@ +--- +- hosts: whatever + + tasks: + - name: Hello world + ansible.builtin.action: debug msg="Hello!" + + - name: This should be fine too + ansible.builtin.action: file state=touch mode=0644 dest=./wherever diff --git a/examples/playbooks/nomatchestest.transformed.yml b/examples/playbooks/nomatchestest.transformed.yml new file mode 100644 index 0000000..4fba8ef --- /dev/null +++ b/examples/playbooks/nomatchestest.transformed.yml @@ -0,0 +1,9 @@ +--- +- name: Fixture + hosts: whatever + tasks: + - name: Hello world + action: debug msg="Hello!" # noqa: fqcn[action-core] + + - name: This should be fine too + action: file state=touch dest=./wherever mode=0600 # noqa: fqcn[action-core] diff --git a/examples/playbooks/nomatchestest.yml b/examples/playbooks/nomatchestest.yml new file mode 100644 index 0000000..4fba8ef --- /dev/null +++ b/examples/playbooks/nomatchestest.yml @@ -0,0 +1,9 @@ +--- +- name: Fixture + hosts: whatever + tasks: + - name: Hello world + action: debug msg="Hello!" # noqa: fqcn[action-core] + + - name: This should be fine too + action: file state=touch dest=./wherever mode=0600 # noqa: fqcn[action-core] diff --git a/examples/playbooks/noqa-nested.yml b/examples/playbooks/noqa-nested.yml new file mode 100644 index 0000000..505741e --- /dev/null +++ b/examples/playbooks/noqa-nested.yml @@ -0,0 +1,9 @@ +--- +- hosts: localhost + tasks: + - name: Example of multi-level block + block: + - name: 2nd level + block: + - ansible.builtin.debug: # noqa: unnamed-task + msg: "test unnamed task in block" diff --git a/examples/playbooks/noqa.yml b/examples/playbooks/noqa.yml new file mode 100644 index 0000000..a69c263 --- /dev/null +++ b/examples/playbooks/noqa.yml @@ -0,0 +1,6 @@ +--- +- hosts: localhost + tasks: + - name: This would typically fire latest[git] and partial-become + become_user: alice # noqa: latest[git] partial-become + git: src=/path/to/git/repo dest=checkout diff --git a/examples/playbooks/norole.yml b/examples/playbooks/norole.yml new file mode 100644 index 0000000..2ee7a83 --- /dev/null +++ b/examples/playbooks/norole.yml @@ -0,0 +1,5 @@ +--- +- hosts: + - localhost + roles: + - name: node diff --git a/examples/playbooks/norole2.yml b/examples/playbooks/norole2.yml new file mode 100644 index 0000000..2ee7a83 --- /dev/null +++ b/examples/playbooks/norole2.yml @@ -0,0 +1,5 @@ +--- +- hosts: + - localhost + roles: + - name: node diff --git a/examples/playbooks/package-check-failure.yml b/examples/playbooks/package-check-failure.yml new file mode 100644 index 0000000..393b52b --- /dev/null +++ b/examples/playbooks/package-check-failure.yml @@ -0,0 +1,21 @@ +--- +- hosts: localhost + tasks: + - name: Install ansible + ansible.builtin.yum: name=ansible state=latest + + - name: Install ansible-lint + ansible.builtin.pip: 
name=ansible-lint + args: + state: latest + + - name: Install some-package + ansible.builtin.package: + name: some-package + state: latest + + - name: Install ansible with update_only to false + ansible.builtin.yum: + name: sudo + state: latest + update_only: false diff --git a/examples/playbooks/package-check-success.yml b/examples/playbooks/package-check-success.yml new file mode 100644 index 0000000..a513d5d --- /dev/null +++ b/examples/playbooks/package-check-success.yml @@ -0,0 +1,22 @@ +--- +- hosts: localhost + tasks: + - name: Install ansible + ansible.builtin.yum: name=ansible-2.1.0.0 state=present + + - name: Install ansible-lint + ansible.builtin.pip: name=ansible-lint + args: + state: present + version: 3.1.2 + + - name: Install some-package + ansible.builtin.package: + name: Some-package + state: present + + - name: Update ansible + ansible.builtin.yum: + name: sudo + state: latest + update_only: true diff --git a/examples/playbooks/pass-loop-var-prefix.yml b/examples/playbooks/pass-loop-var-prefix.yml new file mode 100644 index 0000000..8f33399 --- /dev/null +++ b/examples/playbooks/pass-loop-var-prefix.yml @@ -0,0 +1,18 @@ +--- +- hosts: localhost + tasks: + # validate that we did not trigger loop-var-prefix on playbooks + - name: That should pass + ansible.builtin.debug: + var: item + loop: + - foo + - bar + - name: A block + block: + - name: That should also pass + ansible.builtin.debug: + var: item + loop: + - apples + - oranges diff --git a/examples/playbooks/play-without-extension b/examples/playbooks/play-without-extension new file mode 100644 index 0000000..e056ab5 --- /dev/null +++ b/examples/playbooks/play-without-extension @@ -0,0 +1,2 @@ +- name: A playbook without expected yml or yaml extension + hosts: localhost diff --git a/examples/playbooks/play.yml b/examples/playbooks/play.yml new file mode 100644 index 0000000..40f387f --- /dev/null +++ b/examples/playbooks/play.yml @@ -0,0 +1,6 @@ +--- +- hosts: bobbins + + tasks: + - name: A bad play + action: ansible.builtin.command service blah restart diff --git a/examples/playbooks/play_miss_include.yml b/examples/playbooks/play_miss_include.yml new file mode 100644 index 0000000..f402c41 --- /dev/null +++ b/examples/playbooks/play_miss_include.yml @@ -0,0 +1,5 @@ +--- +- name: Fixture + hosts: all + roles: + - include_miss diff --git a/examples/playbooks/playbook-imported.yml b/examples/playbooks/playbook-imported.yml new file mode 100644 index 0000000..d5f4ae8 --- /dev/null +++ b/examples/playbooks/playbook-imported.yml @@ -0,0 +1,11 @@ +--- +- name: Fixture + hosts: localhost + connection: local + gather_facts: false + tasks: + - ansible.builtin.command: + cmd: echo "no name" # should generate unnamed-task + - name: Another task + ansible.builtin.debug: + msg: debug message diff --git a/examples/playbooks/playbook-parent.yml b/examples/playbooks/playbook-parent.yml new file mode 100644 index 0000000..b7d0edd --- /dev/null +++ b/examples/playbooks/playbook-parent.yml @@ -0,0 +1,9 @@ +--- +- name: Importing another playbook + ansible.builtin.import_playbook: playbook-imported.yml + +- name: Importing playbook from collection + import_playbook: community.molecule.validate # noqa: fqcn + +- name: Importing playbook using jinja2 + import_playbook: "{{ 'community.molecule.validate' }}" # noqa: fqcn diff --git a/examples/playbooks/roles b/examples/playbooks/roles new file mode 120000 index 0000000..d8c4472 --- /dev/null +++ b/examples/playbooks/roles @@ -0,0 +1 @@ +../roles
\ No newline at end of file diff --git a/examples/playbooks/rule-args-module-fail-1.yml b/examples/playbooks/rule-args-module-fail-1.yml new file mode 100644 index 0000000..4c9b51e --- /dev/null +++ b/examples/playbooks/rule-args-module-fail-1.yml @@ -0,0 +1,36 @@ +--- +- name: Fixture to validate module options failure scenarios + hosts: localhost + tasks: + - name: Clone content repository + # module should produce: 'missing required arguments: repo' + ansible.builtin.git: + dest: /home/www + accept_hostkey: true + version: master + update: false + + - name: Enable service httpd and ensure it is not masked + # module should produce: 'missing parameter(s) required by 'enabled': name' + ansible.builtin.systemd: + enabled: true + masked: false + + - name: Enable service httpd and ensure it is not masked + # module should produce: 'Unsupported parameters for ansible.builtin.systemd module: foo. Supported parameters include: no_block, state, daemon_reload (daemon-reload), name (service, unit), force, masked, daemon_reexec (daemon-reexec), scope, enabled.' + ansible.builtin.systemd: + foo: true + + - name: An invalid call of setup module + # setup module in ansible is the only module that has a .. relative + # import that used to cause problems + ansible.builtin.setup: + foo: # this is a nested object which will have the __ injections + # that we later need to clean + bar: true + + - name: Remove deployment dir + # module should produce: 'value of state must be one of: absent, directory, file, hard, link, touch, got: away' + ansible.builtin.file: + path: /opt/software/deployment + state: away diff --git a/examples/playbooks/rule-args-module-pass-1.yml b/examples/playbooks/rule-args-module-pass-1.yml new file mode 100644 index 0000000..872cd39 --- /dev/null +++ b/examples/playbooks/rule-args-module-pass-1.yml @@ -0,0 +1,26 @@ +--- +- name: Fixture to validate module options pass scenario + hosts: localhost + tasks: + - name: Clone content repository + ansible.builtin.git: + repo: "{{ archive_services_repo_url }}" + dest: /home/www + accept_hostkey: true + version: master + update: false + + - name: Enable service httpd and ensure it is not masked + ansible.builtin.systemd: + name: httpd + enabled: false + masked: false + + - name: Clear deployment dir + ansible.builtin.file: + path: /opt/software/deployment + state: "{{ item }}" + mode: "0755" + with_items: + - absent + - directory diff --git a/examples/playbooks/rule-avoid-implicit-fail.yml b/examples/playbooks/rule-avoid-implicit-fail.yml new file mode 100644 index 0000000..ceaaeb4 --- /dev/null +++ b/examples/playbooks/rule-avoid-implicit-fail.yml @@ -0,0 +1,9 @@ +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Write file content + ansible.builtin.copy: + content: { "foo": "bar" } # <-- avoid-implicit[copy-content] + dest: /tmp/foo.txt + mode: "0600" diff --git a/examples/playbooks/rule-avoid-implicit-pass.yml b/examples/playbooks/rule-avoid-implicit-pass.yml new file mode 100644 index 0000000..248fc8c --- /dev/null +++ b/examples/playbooks/rule-avoid-implicit-pass.yml @@ -0,0 +1,9 @@ +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Write file content + ansible.builtin.copy: + content: "Some {{ foo }}" + dest: /tmp/foo.txt + mode: "0600" diff --git a/examples/playbooks/rule-deprecated-bare-vars-fail.yml b/examples/playbooks/rule-deprecated-bare-vars-fail.yml new file mode 100644 index 0000000..7091f46 --- /dev/null +++ b/examples/playbooks/rule-deprecated-bare-vars-fail.yml @@ -0,0 +1,97 @@ +--- +- 
name: Fixture for deprecated-bare-vars rule + hosts: localhost + become: false + vars: + my_list: + - foo + - bar + + my_list2: + - 1 + - 2 + + my_list_of_dicts: + - foo: 1 + bar: 2 + - foo: 3 + bar: 4 + + my_list_of_lists: + - "{{ my_list }}" + - "{{ my_list2 }}" + + my_filenames: + - foo.txt + - bar.txt + + my_dict: + foo: bar + + tasks: + - name: Use with_items loop using bare variable + ansible.builtin.debug: + msg: "{{ item }}" + with_items: my_list + + - name: Use with_dict loop using bare variable + ansible.builtin.debug: + msg: "{{ item }}" + with_dict: my_dict + + ### Testing with_dict with a default empty dictionary + - name: Use with_dict loop using variable and default + ansible.builtin.debug: + msg: "{{ item.key }} - {{ item.value }}" + with_dict: uwsgi_ini | default({}) + + - name: Use with_nested loop using bare variable + ansible.builtin.debug: + msg: "{{ item.0 }} {{ item.1 }}" + with_nested: + - my_list + - "{{ my_list2 }}" + + - name: Use with_nested loop using bare variable + ansible.builtin.debug: + msg: "{{ item.0 }} {{ item.1 }}" + with_nested: "my_list2" + + - name: Use with_file loop using bare variable + ansible.builtin.debug: + msg: "{{ item }}" + with_file: my_list + + - name: Use with_fileglob loop using bare variable + ansible.builtin.debug: + msg: "{{ item }}" + with_fileglob: my_list + + - name: Use with_together loop using bare variable + ansible.builtin.debug: + msg: "{{ item.0 }} {{ item.1 }}" + with_together: + - my_list + - "{{ my_list2 }}" + + - name: Use with_subelements loop using bare variable + ansible.builtin.debug: + msg: "{{ item.0 }}" + with_subelements: + - my_list_of_dicts + - bar + + - name: Use with_random_choice loop using bare variable + ansible.builtin.debug: + msg: "{{ item }}" + with_random_choice: my_list + + - name: Use with_first_found loop using bare variable + ansible.builtin.debug: + msg: "{{ item }}" + with_first_found: my_filenames + + - name: Use with_indexed_items loop + ansible.builtin.debug: + msg: "{{ item.0 }} {{ item.1 }}" + with_indexed_items: my_list diff --git a/examples/playbooks/rule-deprecated-bare-vars-pass.yml b/examples/playbooks/rule-deprecated-bare-vars-pass.yml new file mode 100644 index 0000000..c7e6521 --- /dev/null +++ b/examples/playbooks/rule-deprecated-bare-vars-pass.yml @@ -0,0 +1,168 @@ +--- +- name: Using bare variables success + hosts: localhost + become: false + vars: + my_list: + - foo + - bar + + my_list2: + - 1 + - 2 + + my_list_of_dicts: + - foo: 1 + bar: 2 + - foo: 3 + bar: 4 + + my_list_of_lists: + - "{{ my_list }}" + - "{{ my_list2 }}" + + my_filenames: + - foo.txt + - bar.txt + + my_dict: + foo: bar + + tasks: + ### Testing with_items loops + - name: Use with_items loop using static list + ansible.builtin.debug: + msg: "{{ item }}" + with_items: + - foo + - bar + + - name: Use with_items using a static hash + ansible.builtin.debug: + msg: "{{ item.key }} - {{ item.value }}" + with_items: + - { key: foo, value: 1 } + - { key: bar, value: 2 } + + - name: Use with_items loop using variable + ansible.builtin.debug: + msg: "{{ item }}" + with_items: "{{ my_list }}" + + ### Testing with_nested loops + - name: Use with_nested loop using static lists + ansible.builtin.debug: + msg: "{{ item[0] }} - {{ item[1] }}" + with_nested: + - [foo, bar] + - ["1", "2", "3"] + + - name: Use with_nested loop using variable list and static + ansible.builtin.debug: + msg: "{{ item[0] }} - {{ item[1] }}" + with_nested: + - "{{ my_list }}" + - ["1", "2", "3"] + + ### Testing with_dict + - name: Use with_dict loop 
using variable + ansible.builtin.debug: + msg: "{{ item.key }} - {{ item.value }}" + with_dict: "{{ my_dict }}" + + ### Testing with_dict with a default empty dictionary + - name: Use with_dict loop using variable and default + ansible.builtin.debug: + msg: "{{ item.key }} - {{ item.value }}" + with_dict: "{{ uwsgi_ini | default({}) }}" + + ### Testing with_file + - name: Use with_file loop using static files list + ansible.builtin.debug: + msg: "{{ item }}" + with_file: + - foo.txt + - bar.txt + + - name: Use with_file loop using list of filenames + ansible.builtin.debug: + msg: "{{ item }}" + with_file: "{{ my_filenames }}" + + ### Testing with_fileglob + - name: Use with_fileglob loop using list of *.txt + ansible.builtin.debug: + msg: "{{ item }}" + with_fileglob: + - "*.txt" + + ### Testing non-list form of with_fileglob + - name: Use with_fileglob loop using single value *.txt + ansible.builtin.debug: + msg: "{{ item }}" + with_fileglob: "*.txt" + + ### Testing non-list form of with_fileglob with trailing templated pattern + - name: Use with_fileglob loop using templated pattern + ansible.builtin.debug: + msg: "{{ item }}" + with_fileglob: foo{{ glob }} + + ### Testing with_together + - name: Use with_together loop using variable lists + ansible.builtin.debug: + msg: "{{ item.0 }} - {{ item.1 }}" + with_together: + - "{{ my_list }}" + - "{{ my_list2 }}" + + - name: Use with_subelements loop + ansible.builtin.debug: + msg: "{{ item }}" + with_subelements: + - "{{ my_list_of_dicts }}" + - bar + + - name: Use with_sequence loop + ansible.builtin.debug: + msg: "{{ item }}" + with_sequence: count=2 + + - name: Use with_random_choice loop + ansible.builtin.debug: + msg: "{{ item }}" + with_random_choice: "{{ my_list }}" + + - name: Use with_first_found loop with static files list + ansible.builtin.debug: + msg: "{{ item }}" + with_first_found: + - foo.txt + - bar.txt + + - name: Use with_first_found loop with list of filenames + ansible.builtin.debug: + msg: "{{ item }}" + with_first_found: "{{ my_filenames }}" + + - name: Use with_indexed_items loop + ansible.builtin.debug: + msg: "{{ item.0 }} {{ item.1 }}" + with_indexed_items: "{{ my_list }}" + + - name: Use with_ini loop + ansible.builtin.debug: + msg: "{{ item }}" + with_ini: value[1-2] section=section1 file=foo.ini re=true + + - name: Use with_inventory_hostnames loop + ansible.builtin.debug: + msg: "{{ item }}" + with_inventory_hostnames: all + + - name: Test more complex jinja is also allowed + ansible.builtin.debug: + msg: "{{ item }}" + with_items: >- + {%- set ns = [1, 1, 2] -%} + {{- ns.keys | unique -}} diff --git a/examples/playbooks/rule-fqcn-fail.yml b/examples/playbooks/rule-fqcn-fail.yml new file mode 100644 index 0000000..8015cc2 --- /dev/null +++ b/examples/playbooks/rule-fqcn-fail.yml @@ -0,0 +1,14 @@ +--- +- name: Fixture + hosts: localhost + collections: + - community.general + tasks: + - name: Shell (fqcn[action-core]) # noqa: command-instead-of-shell + shell: echo This rule should get matched by the fqcn rule + changed_when: false + - name: Shell (fqcn[action]) + ini_file: + section: foo + path: /tmp/test.ini + mode: "0644" diff --git a/examples/playbooks/rule-fqcn-pass.yml b/examples/playbooks/rule-fqcn-pass.yml new file mode 100644 index 0000000..eafd653 --- /dev/null +++ b/examples/playbooks/rule-fqcn-pass.yml @@ -0,0 +1,13 @@ +--- +- name: Fixture + hosts: localhost + tasks: + - name: Shell (fqcn) # noqa: command-instead-of-shell + changed_when: false + ansible.builtin.shell: echo This rule should not get 
matched by the fqcn rule + - name: Use FQCN with more than 3 parts + community.general.system.sudoers: + name: should-not-be-here + state: absent + - name: Command with legacy FQCN + ansible.legacy.command: echo This rule should not get matched by the fqcn rule diff --git a/examples/playbooks/rule-jinja-invalid.yml b/examples/playbooks/rule-jinja-invalid.yml new file mode 100644 index 0000000..c7bf5ba --- /dev/null +++ b/examples/playbooks/rule-jinja-invalid.yml @@ -0,0 +1,12 @@ +--- +- name: Fixture + hosts: localhost + tasks: + - name: A block used to check that we do not identify error at correct level + block: + - name: Foo # <-- this is valid jinja2 + ansible.builtin.debug: + foo: "{{ 1 }}" # <-- jinja2[spacing] + msg: "{{ 'a' b }}" # <-- jinja2[invalid] +# It should be noted that even ansible --syntax-check fails to spot the jinja +# error above, but ansible will throw a runtime error when running diff --git a/examples/playbooks/rule-jinja-valid.yml b/examples/playbooks/rule-jinja-valid.yml new file mode 100644 index 0000000..00ce00f --- /dev/null +++ b/examples/playbooks/rule-jinja-valid.yml @@ -0,0 +1,48 @@ +--- +# https://github.com/ansible/ansible-lint/issues/2464 +# https://github.com/ansible/ansible-lint/issues/2462 +# https://github.com/ansible/ansible-lint/issues/2459 +- name: Fixture to test various jinja parsing bugs that we should ignore + hosts: localhost + tasks: + - name: Foo {{ buildset_registry.host | ipwrap }} + ansible.builtin.debug: + msg: "{{ lookup('template', 'lookup/redis_server__env_ports.j2') | from_yaml }}" + loop: "{{ github_release_query.results | subelements('json.assets', {'skip_missing': True}) }}" + - name: Zoo + ansible.builtin.debug: + msg: "{{ lookup('ansible.builtin.ini', 'SOME_VAR', type='properties', file='/tmp/some-file') }}" + + - name: Generate Dovecot main configuration file + ansible.builtin.template: + src: '{{ lookup("template_src", "etc/dovecot/dovecot.conf.j2") }}' + dest: "/etc/dovecot/dovecot.conf" + owner: "root" + group: "dovecot" + mode: "0640" + - name: Bug https://github.com/ansible/ansible-lint/issues/2569 + ansible.builtin.include_tasks: + file: /dev/null + vars: + ns_vars: {} + x: "{{ lookup('ansible.builtin.template', 'namespace.yaml.j2', template_vars=ns_vars) | from_yaml }}" + +# https://github.com/ansible/ansible-lint/issues/2697 +- name: Test linter + hosts: localhost + gather_facts: false + tasks: + - name: Passed linter + ansible.builtin.debug: + msg: "{{ test | to_json }}" + vars: + test: + one: two + param: "{{ ansible_host }}" + - name: Failed linter + ansible.builtin.debug: + msg: "{{ test | to_json }}" + vars: + test: + one: two + param: no jinja diff --git a/examples/playbooks/rule-key-order-fail.yml b/examples/playbooks/rule-key-order-fail.yml new file mode 100644 index 0000000..a83514a --- /dev/null +++ b/examples/playbooks/rule-key-order-fail.yml @@ -0,0 +1,30 @@ +--- +- name: Fixture + hosts: localhost + tasks: + - no_log: true + ansible.builtin.command: echo hello + name: Task with no_log on top + changed_when: false + - when: true + name: Task with when on top + ansible.builtin.command: echo hello + changed_when: false + - delegate_to: localhost + name: Delegate_to on top + ansible.builtin.command: echo hello + changed_when: false + - loop: + - 1 + - 2 + name: Loopy + ansible.builtin.command: echo {{ item }} + changed_when: false + - become: true + name: Become first + ansible.builtin.command: echo hello + changed_when: false + - register: test + ansible.builtin.command: echo hello + name: Register first + 
changed_when: false diff --git a/examples/playbooks/rule-name-casing.yml b/examples/playbooks/rule-name-casing.yml new file mode 100644 index 0000000..1b2efb8 --- /dev/null +++ b/examples/playbooks/rule-name-casing.yml @@ -0,0 +1,10 @@ +--- +- name: Fixture for src/ansiblelint/rules/name.py::test_rule_name_lowercase + hosts: all + tasks: + - name: this task has a name is not correctly capitalized + ansible.builtin.command: echo "Hello World" + changed_when: false + - name: 测试 should not trigger name[case] rule! + ansible.builtin.command: echo "Hello World" + changed_when: false diff --git a/examples/playbooks/rule-name-missing-fail.yml b/examples/playbooks/rule-name-missing-fail.yml new file mode 100644 index 0000000..4ea543e --- /dev/null +++ b/examples/playbooks/rule-name-missing-fail.yml @@ -0,0 +1,11 @@ +--- +- hosts: all # <-- name[missing] + tasks: + - ansible.builtin.command: echo "no name" # <-- name[missing] + changed_when: false + - name: "" # <-- name[missing] + ansible.builtin.command: echo "empty name" + changed_when: false + - ansible.builtin.debug: # <-- name[missing] + msg: Debug without a name + - ansible.builtin.meta: flush_handlers # <-- name[missing] diff --git a/examples/playbooks/rule-name-missing-pass.yml b/examples/playbooks/rule-name-missing-pass.yml new file mode 100644 index 0000000..965a80a --- /dev/null +++ b/examples/playbooks/rule-name-missing-pass.yml @@ -0,0 +1,12 @@ +--- +- name: Play for testing name[missing] rule + hosts: all + tasks: + - name: This task has a name + ansible.builtin.command: echo "Hello World" # noqa: no-free-form + changed_when: false + - name: Debug task with name + ansible.builtin.debug: msg="Hello World" # noqa: no-free-form + - name: Flush handler with name + ansible.builtin.meta: flush_handlers + changed_when: false diff --git a/examples/playbooks/rule-name-play-fail.yml b/examples/playbooks/rule-name-play-fail.yml new file mode 100644 index 0000000..5e42fa0 --- /dev/null +++ b/examples/playbooks/rule-name-play-fail.yml @@ -0,0 +1,3 @@ +--- +- hosts: localhost # <-- name[missing] + tasks: [] diff --git a/examples/playbooks/rule-name-templated-fail.yml b/examples/playbooks/rule-name-templated-fail.yml new file mode 100644 index 0000000..8a4f156 --- /dev/null +++ b/examples/playbooks/rule-name-templated-fail.yml @@ -0,0 +1,13 @@ +--- +- name: Fixture for src/ansiblelint/rules/name.py::test_name_template( + hosts: all + tasks: + - name: This task {{ sampleService }} name is not correctly templated + ansible.builtin.command: echo "Hello World" + changed_when: false + - name: This task is correctly templated {{ sampleService }} + ansible.builtin.command: echo "Hello World" + changed_when: false + - name: This task is correctly templated '{{ sampleService }}' + ansible.builtin.command: echo "Hello World" + changed_when: false diff --git a/examples/playbooks/rule-no-changed-when-fail.yml b/examples/playbooks/rule-no-changed-when-fail.yml new file mode 100644 index 0000000..1f994b8 --- /dev/null +++ b/examples/playbooks/rule-no-changed-when-fail.yml @@ -0,0 +1,15 @@ +--- +- name: Fixture for no-changed-when (fail with 3 occurrences) + hosts: all + tasks: + - name: Register command output, but cat still does not change anything + ansible.builtin.command: cat {{ my_file | quote }} + register: my_output + - name: Block level 1 + block: + - name: Block level 2 + block: + - name: Basic command task, should fail + ansible.builtin.command: cat my_file + - name: Basic shell task, should fail + shell: cat my_file # noqa: fqcn 
command-instead-of-shell diff --git a/examples/playbooks/rule-no-changed-when-pass.yml b/examples/playbooks/rule-no-changed-when-pass.yml new file mode 100644 index 0000000..9681e06 --- /dev/null +++ b/examples/playbooks/rule-no-changed-when-pass.yml @@ -0,0 +1,23 @@ +--- +- name: Fixture for no-changed-when (pass) + hosts: all + tasks: + - name: Handle command output with return code # noqa: command-instead-of-shell + ansible.builtin.command: cat {{ my_file | quote }} + register: my_output + changed_when: my_output.rc != 0 + + - name: Handle shell output with return code # noqa: command-instead-of-shell + ansible.builtin.shell: cat {{ my_file | quote }} + register: my_output + changed_when: my_output.rc != 0 + + - name: Handle shell output with false changed_when # noqa: command-instead-of-shell + ansible.builtin.shell: cat {{ my_file | quote }} + register: my_output + changed_when: false + + - name: Command with argument + command: createfile.sh # noqa: fqcn + args: + creates: /tmp/????unknown_files???? diff --git a/examples/playbooks/rule-no-free-form-fail.yml b/examples/playbooks/rule-no-free-form-fail.yml new file mode 100644 index 0000000..f5bcb08 --- /dev/null +++ b/examples/playbooks/rule-no-free-form-fail.yml @@ -0,0 +1,10 @@ +--- +- name: Example with discouraged free-form syntax + hosts: localhost + tasks: + - name: Create a placefolder file + ansible.builtin.command: chdir=/tmp touch foo # <-- don't use shorthand + changed_when: false + - name: Use raw to echo + ansible.builtin.raw: executable=/bin/bash echo foo # <-- don't use executable= + changed_when: false diff --git a/examples/playbooks/rule-no-free-form-pass.yml b/examples/playbooks/rule-no-free-form-pass.yml new file mode 100644 index 0000000..36d359d --- /dev/null +++ b/examples/playbooks/rule-no-free-form-pass.yml @@ -0,0 +1,19 @@ +--- +- name: Example with discouraged free-form syntax + hosts: localhost + tasks: + - name: Create a placefolder file + ansible.builtin.command: + cmd: touch foo + chdir: /tmp + changed_when: false + - name: Use raw to echo + ansible.builtin.raw: echo foo + args: + executable: /bin/bash + changed_when: false + - name: Configure locale + # https://github.com/ansible/ansible-lint/issues/2573 + ansible.builtin.command: localectl set-locale LANG=en_GB.UTF-8 + when: not ansible_check_mode + changed_when: false diff --git a/examples/playbooks/rule-no-handler-fail.yml b/examples/playbooks/rule-no-handler-fail.yml new file mode 100644 index 0000000..3bc23a3 --- /dev/null +++ b/examples/playbooks/rule-no-handler-fail.yml @@ -0,0 +1,15 @@ +--- +- name: Example of no-handler rule + hosts: localhost + tasks: + - name: Register result of a task + ansible.builtin.copy: + dest: "/tmp/placeholder" + content: "Ansible made this!" + mode: "0600" + register: result # <-- we register the result of the task + + - name: Second command to run + ansible.builtin.debug: + msg: The placeholder file was modified! + when: result.changed # <-- this triggers no-handler rule diff --git a/examples/playbooks/rule-no-handler-pass.yml b/examples/playbooks/rule-no-handler-pass.yml new file mode 100644 index 0000000..af6a726 --- /dev/null +++ b/examples/playbooks/rule-no-handler-pass.yml @@ -0,0 +1,15 @@ +--- +- name: Example of no-handler rule + hosts: localhost + tasks: + - name: Register result of a task + ansible.builtin.copy: + dest: "/tmp/placeholder" + content: "Ansible made this!" 
+ mode: "0600" + notify: + - Second command to run # <-- handler will run only when file is changed + handlers: + - name: Second command to run + ansible.builtin.debug: + msg: The placeholder file was modified! diff --git a/examples/playbooks/rule-no-jinja-when-fail.yml b/examples/playbooks/rule-no-jinja-when-fail.yml new file mode 100644 index 0000000..15778ea --- /dev/null +++ b/examples/playbooks/rule-no-jinja-when-fail.yml @@ -0,0 +1,21 @@ +--- +- name: One + hosts: all + tasks: + - name: Test when with jinja2 # noqa: jinja[spacing] + ansible.builtin.debug: + msg: text + when: "{{ false }}" + +- name: Two + hosts: all + roles: + - role: test + when: "{{ '1' = '1' }}" + +- name: Three + hosts: all + roles: + - role: test + when: + - "{{ '1' = '1' }}" diff --git a/examples/playbooks/rule-no-jinja-when-pass.yml b/examples/playbooks/rule-no-jinja-when-pass.yml new file mode 100644 index 0000000..b9d02f5 --- /dev/null +++ b/examples/playbooks/rule-no-jinja-when-pass.yml @@ -0,0 +1,17 @@ +--- +- name: Test fixture for no-jinja-when + hosts: all + tasks: + - name: Test when + ansible.builtin.debug: + msg: text + when: true + - name: Test when 2 + ansible.builtin.debug: + msg: text2 + when: 1 = 1 + - name: Three + ansible.builtin.debug: + msg: text2 + when: + - "false" diff --git a/examples/playbooks/rule-no-prompting.yml b/examples/playbooks/rule-no-prompting.yml new file mode 100644 index 0000000..5f74303 --- /dev/null +++ b/examples/playbooks/rule-no-prompting.yml @@ -0,0 +1,27 @@ +--- +- name: Fixture for testing no-prompting rule, lines 5,17 + hosts: all + vars_prompt: + - name: username + prompt: What is your username? + private: false + + - name: password + prompt: What is your password? + + tasks: + - name: Pause for 5 minutes to build app cache + ansible.builtin.pause: + minutes: 5 # should not trigger because minutes is mentioned + + - name: A helpful reminder of what to look out for post-update + ansible.builtin.pause: + # should trigger no-prompting as neither seconds/minutes are mentioned + prompt: "Make sure org.foo.FooOverload exception is not present" + +- name: Fixture for testing if no vars_prompt is provided, lines 34-36 + hosts: all + tasks: + - name: Pause for 5 minutes to build app cache + ansible.builtin.pause: + minutes: 5 # should not trigger because minutes is mentioned diff --git a/examples/playbooks/rule-no-tabs.yml b/examples/playbooks/rule-no-tabs.yml new file mode 100644 index 0000000..422d1d8 --- /dev/null +++ b/examples/playbooks/rule-no-tabs.yml @@ -0,0 +1,17 @@ +--- +- name: Fixture for no-tabs rule + hosts: localhost + tasks: + - name: Should not trigger no-tabs rules + ansible.builtin.lineinfile: + path: some.txt + regexp: ^\t$ + line: string with \t inside + - name: Foo + ansible.builtin.debug: + msg: "Presence of \t should trigger no-tabs here." 
+ - name: Should not trigger no-tabs rules # noqa fqcn + lineinfile: + path: some.txt + regexp: ^\t$ + line: string with \t inside diff --git a/examples/playbooks/rule-only-builtins.yml b/examples/playbooks/rule-only-builtins.yml new file mode 100644 index 0000000..18f0f04 --- /dev/null +++ b/examples/playbooks/rule-only-builtins.yml @@ -0,0 +1,12 @@ +--- +- name: Fixture for examples/playbooks/rule-only-builtins.yml + hosts: localhost + tasks: + - name: Sysctl # noqa: args[module] + # while next module is mocked in our config, we still want to see that + # only-builtins rules gets matched + fake_namespace.fake_collection.fake_module: + name: vm.swappiness + value: "5" + - name: Some task # noqa: args[module] + zuul_return: {} diff --git a/examples/playbooks/rule-partial-become-without-become-fail.yml b/examples/playbooks/rule-partial-become-without-become-fail.yml new file mode 100644 index 0000000..da48b2f --- /dev/null +++ b/examples/playbooks/rule-partial-become-without-become-fail.yml @@ -0,0 +1,28 @@ +--- +- hosts: localhost + name: Use of become_user without become play + become_user: root + + tasks: + - ansible.builtin.debug: + msg: hello + +- hosts: localhost + + tasks: + - name: Use of become_user without become task + ansible.builtin.command: whoami + become_user: postgres + changed_when: false + +- hosts: localhost + + tasks: + - name: A block with become and become_user on different tasks + block: + - name: Sample become + become: true + ansible.builtin.command: whoami + - name: Sample become_user + become_user: postgres + ansible.builtin.command: whoami diff --git a/examples/playbooks/rule-partial-become-without-become-pass.yml b/examples/playbooks/rule-partial-become-without-become-pass.yml new file mode 100644 index 0000000..e1ae189 --- /dev/null +++ b/examples/playbooks/rule-partial-become-without-become-pass.yml @@ -0,0 +1,35 @@ +--- +- hosts: localhost + become_user: root + become: true + + tasks: + - ansible.builtin.debug: + msg: hello + +- hosts: localhost + + tasks: + - name: Foo + ansible.builtin.command: whoami + become_user: postgres + become: true + changed_when: false + +- hosts: localhost + become: true + + tasks: + - name: Accepts a become from higher scope + ansible.builtin.command: whoami + become_user: postgres + changed_when: false + +- hosts: localhost + become_user: postgres + + tasks: + - name: Accepts a become from a lower scope + ansible.builtin.command: whoami + become: true + changed_when: false diff --git a/examples/playbooks/rule-risky-file-permissions-fail.yml b/examples/playbooks/rule-risky-file-permissions-fail.yml new file mode 100644 index 0000000..02c551b --- /dev/null +++ b/examples/playbooks/rule-risky-file-permissions-fail.yml @@ -0,0 +1,88 @@ +# Fixture for RiskyFilePermissionsRule should return 11 occurrences +--- +- name: FAIL_INI_PRESERVE + hosts: all + tasks: + - name: Ini_file does not accept preserve mode + community.general.ini_file: + path: foo + create: true + mode: preserve + +- name: FAIL_INI_PERMISSION + hosts: all + tasks: + - name: Permissions needed if create is used + community.general.ini_file: + path: foo + create: true + +- name: FAIL_PRESERVE_MODE + hosts: all + tasks: + - name: File does not allow preserve value for mode + ansible.builtin.file: + path: foo + mode: preserve + +- name: FAIL_MISSING_PERMISSIONS_TOUCH + hosts: all + tasks: + - name: Permissions missing and might create file + file: + path: foo + state: touch + - name: Permissions missing and might create file (fqcn) + ansible.builtin.file: + path: foo + 
state: touch + +- name: FAIL_MISSING_PERMISSIONS_DIRECTORY + hosts: all + tasks: + - name: Permissions missing and might create directory + file: + path: foo + state: directory + - name: Lineinfile when create is true (fqcn) + ansible.builtin.lineinfile: + path: foo + create: true + line: some content here + +- name: FAIL_MISSING_PERMISSIONS_GET_URL + hosts: all + tasks: + - name: Permissions missing + # noqa: fqcn[action-core] + get_url: + url: http://foo + dest: foo + +- name: FAIL_LINEINFILE_CREATE + hosts: all + tasks: + - name: Lineinfile when create is true + ansible.builtin.lineinfile: + path: foo + create: true + line: some content here + +- name: FAIL_REPLACE_PRESERVE + hosts: all + tasks: + - name: Replace does not allow preserve mode + replace: + path: foo + mode: preserve + +- name: FAIL_PERMISSION_COMMENT + hosts: all + tasks: + - name: Permissions is only a comment + file: + path: foo + owner: root + group: root + state: directory + # mode: 0755 diff --git a/examples/playbooks/rule-risky-file-permissions-pass.yml b/examples/playbooks/rule-risky-file-permissions-pass.yml new file mode 100644 index 0000000..656b1d4 --- /dev/null +++ b/examples/playbooks/rule-risky-file-permissions-pass.yml @@ -0,0 +1,75 @@ +# Fixture for RiskyFilePermissionsRule should pass +--- +- name: SUCCESS_PERMISSIONS_PRESENT + hosts: all + tasks: + - name: Permissions not missing and numeric + ansible.builtin.file: + path: foo + mode: "0600" + +- name: SUCCESS_PERMISSIONS_PRESENT_GET_URL + hosts: all + tasks: + - name: Permissions not missing and numeric + ansible.builtin.get_url: + url: http://foo + dest: foo + mode: "0600" + +- name: SUCCESS_ABSENT_STATE + hosts: all + tasks: + - name: Permissions missing while state is absent is fine + ansible.builtin.file: + path: foo + state: absent + +- name: SUCCESS_DEFAULT_STATE + hosts: all + tasks: + - name: Permissions missing while state is file (default) is fine + ansible.builtin.file: + path: foo + +- name: SUCCESS_LINK_STATE + hosts: all + tasks: + - name: Permissions missing while state is link is fine + ansible.builtin.file: + path: foo2 + src: foo + state: link + +- name: SUCCESS_CREATE_FALSE + hosts: all + tasks: + - name: File edit when create is false + ansible.builtin.lineinfile: + path: foo + create: false + line: some content here + +- name: SUCCESS_REPLACE + hosts: all + tasks: + - name: Replace should not require mode + ansible.builtin.replace: + path: foo + +- name: SUCCESS_RECURSE + hosts: all + tasks: + - name: File with recursive does not require mode + ansible.builtin.file: + state: directory + recurse: true + - name: Permissions not missing and numeric (fqcn) + ansible.builtin.file: + path: bar + mode: "755" # noqa: risky-octal + - name: File edit when create is false (fqcn) + ansible.builtin.lineinfile: + path: foo + create: false + line: some content here diff --git a/examples/playbooks/rule-risky-octal-fail.yml b/examples/playbooks/rule-risky-octal-fail.yml new file mode 100644 index 0000000..9e4a3bf --- /dev/null +++ b/examples/playbooks/rule-risky-octal-fail.yml @@ -0,0 +1,25 @@ +--- +- name: Fixture for testing risky-octal rule + hosts: hosts + vars: + varset: varset + tasks: + - name: Octal permissions test fail (600) + ansible.builtin.file: + path: foo + mode: 600 + + - name: Octal permissions test fail (710) + ansible.builtin.file: + path: foo + mode: 710 + + - name: Octal permissions test fail (123) + ansible.builtin.file: + path: foo + mode: 123 + + - name: Octal permissions test fail (2000) + ansible.builtin.file: + path: bar + 
mode: 2000 diff --git a/examples/playbooks/rule-risky-octal-pass.yml b/examples/playbooks/rule-risky-octal-pass.yml new file mode 100644 index 0000000..b900990 --- /dev/null +++ b/examples/playbooks/rule-risky-octal-pass.yml @@ -0,0 +1,42 @@ +--- +- name: Fixture for testing risky-octal rule + hosts: hosts + vars: + varset: varset + tags: + - no-free-form + tasks: + - name: Octal permissions test success (0600) + ansible.builtin.file: + path: foo + mode: "0600" + + - name: Octal permissions test success (0000) + ansible.builtin.file: + path: foo + mode: "0000" + + - name: Octal permissions test success (02000) + ansible.builtin.file: + path: bar + mode: "02000" + + - name: Octal permissions test success (02751) + ansible.builtin.file: + path: bar + mode: "02751" + + - name: Octal permissions test success (0777) + ansible.builtin.file: path=baz mode=0777 # noqa: no-free-form + + - name: Octal permissions test success (0711) + ansible.builtin.file: path=baz mode=0711 # noqa: no-free-form + + - name: Permissions test success (0777) + ansible.builtin.file: path=baz mode=u+rwx # noqa: no-free-form + + - name: Octal permissions test success (777) + ansible.builtin.file: path=baz mode=777 # noqa: no-free-form + + - name: Octal permissions test success (733) + ansible.builtin.file: path=baz mode=733 # noqa: no-free-form diff --git a/examples/playbooks/rule-role-name-path.yml b/examples/playbooks/rule-role-name-path.yml new file mode 100644 index 0000000..5fedae8 --- /dev/null +++ b/examples/playbooks/rule-role-name-path.yml @@ -0,0 +1,10 @@ +--- +- name: Fixture for role-name[path] rule + hosts: localhost + roles: + - subfolder/1st_role # 1st + - role: subfolder/2nd_role # 2nd + tasks: + - name: Another + ansible.builtin.import_role: + name: subfolder/3rd_role # 3rd diff --git a/examples/playbooks/rule-syntax-moves.yml b/examples/playbooks/rule-syntax-moves.yml new file mode 100644 index 0000000..7cba53b --- /dev/null +++ b/examples/playbooks/rule-syntax-moves.yml @@ -0,0 +1,16 @@ +--- +- name: Fixture for syntax[moves] + hosts: localhost + tasks: + - name: 1st + ansible.builtin.debug: + msg: "{{ item }}" + with_flattened: [] # moved to community.general in 2.10 + - name: 2nd + ansible.builtin.debug: + msg: "{{ item }}" + with_cartesian: [] # moved to community.general in 2.10 + - name: 3rd + ansible.builtin.debug: + msg: "{{ item }}" + with_filetree: [] # moved to community.general in 2.10 diff --git a/examples/playbooks/rule-validate-module-options-pass-2.yml b/examples/playbooks/rule-validate-module-options-pass-2.yml new file mode 100644 index 0000000..90b88b3 --- /dev/null +++ b/examples/playbooks/rule-validate-module-options-pass-2.yml @@ -0,0 +1,10 @@ +--- +- name: Fixture to validate module options action pass scenarios + hosts: localhost + tasks: + - name: Copy a new "ntp.conf" file into place with backup + ansible.builtin.copy: + src: /mine/ntp.conf + dest: /etc/ntp.conf + backup: true + mode: "0600" diff --git a/examples/playbooks/rule-var-naming-fail.yml b/examples/playbooks/rule-var-naming-fail.yml new file mode 100644 index 0000000..6d812e9 --- /dev/null +++ b/examples/playbooks/rule-var-naming-fail.yml @@ -0,0 +1,32 @@ +--- +- name: Fixture + hosts: localhost + vars: + CamelCaseIsBad: false # invalid + this_is_valid: # valid because content is a dict, not a variable + CamelCase: ... + ALL_CAPS: ... + ALL_CAPS_ARE_BAD_TOO: ... 
# invalid + CamelCaseButErrorIgnored: true # noqa: var-naming + + tasks: + - name: Foo + ansible.builtin.set_fact: + "{{ 'test_' }}var": "value" # valid + - name: Bar + ansible.builtin.set_fact: + CamelCaseButErrorIgnored: true # noqa: var-naming + - name: Test in a block + vars: + BAD: false # invalid + MoreBad: ... # invalid + block: + - name: Foo + vars: + ALL_CAPS_ARE_BAD_TOO: "{{ MoreBad }}" # invalid + ansible.builtin.set_fact: + CamelCaseIsBad: "{{ BAD }}" # invalid + - name: Test on register + ansible.builtin.debug: + var: test_var + register: CamelCaseIsBad # invalid diff --git a/examples/playbooks/rulebook.yml b/examples/playbooks/rulebook.yml new file mode 100644 index 0000000..3eaf308 --- /dev/null +++ b/examples/playbooks/rulebook.yml @@ -0,0 +1,21 @@ +--- +# That file is not a valid playbook but it is a valid rulebook that was +# mistakenly put under a playbook directory. +- name: Demo rules with kafka as source + hosts: localhost + sources: + - name: kafka + kafka: + topic: eda + host: localhost + port: 9092 + group_id: testing + rules: + - name: + condition: event.i is defined + action: + debug: + - name: + condition: event.stop == true + action: + shutdown: diff --git a/examples/playbooks/run-once-fail.yml b/examples/playbooks/run-once-fail.yml new file mode 100644 index 0000000..8963196 --- /dev/null +++ b/examples/playbooks/run-once-fail.yml @@ -0,0 +1,12 @@ +--- +- name: "Example with run_once" + hosts: all + # strategy: free # noqa: run-once[play] (Corrected code example) + strategy: free + gather_facts: false + tasks: + # - name: Task with run_once # noqa: run-once[task] (Corrected code example) + - name: Task with run_once + ansible.builtin.debug: + msg: "Test" + run_once: true # <-- avoid use of strategy as free at play level when using run_once at task level diff --git a/examples/playbooks/run-once-pass.yml b/examples/playbooks/run-once-pass.yml new file mode 100644 index 0000000..030a2a0 --- /dev/null +++ b/examples/playbooks/run-once-pass.yml @@ -0,0 +1,8 @@ +--- +- name: "Example without run_once" + hosts: all + gather_facts: false + tasks: + - name: Task without run_once + ansible.builtin.debug: + msg: "Test" diff --git a/examples/playbooks/schema-error-string.yml b/examples/playbooks/schema-error-string.yml new file mode 100644 index 0000000..00e5e40 --- /dev/null +++ b/examples/playbooks/schema-error-string.yml @@ -0,0 +1,4 @@ +--- +foo +# This file is valid YAML but from our point of view is an error, as is +# neither a Sequence or a Mapping. 
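For contrast, a minimal sketch of the shape the playbook schema does expect: the document root is a YAML sequence of plays and each play is a mapping. This snippet is illustrative only; it is not one of the fixture files in the diff.

---
# Illustrative counterpart to schema-error-string.yml: the root is a list,
# and each list item (a play) is a mapping.
- name: Smallest play that passes playbook schema validation
  hosts: localhost
  tasks: []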
diff --git a/examples/playbooks/skiptasks.yml b/examples/playbooks/skiptasks.yml new file mode 100644 index 0000000..7a3b09d --- /dev/null +++ b/examples/playbooks/skiptasks.yml @@ -0,0 +1,64 @@ +--- +- hosts: all + + tasks: + - name: Test latest[git] + action: ansible.builtin.git + + - name: Test latest[hg] + action: ansible.builtin.hg + + - name: Test command-instead-of-module + ansible.builtin.command: git log + changed_when: false + + - name: Test deprecated-command-syntax + ansible.builtin.command: creates=B chmod 644 A + + - name: Test latest[git] (skip) + action: ansible.builtin.git + tags: + - skip_ansible_lint + + - name: Test latest[hg] (skip) + action: ansible.builtin.hg + tags: + - skip_ansible_lint + + - name: Test command-instead-of-module (skip) + ansible.builtin.command: git log + tags: + - skip_ansible_lint + + - name: Test deprecated-command-syntax (skip) + ansible.builtin.command: chmod 644 A + tags: + - skip_ansible_lint + + - name: Test latest[git] (don't warn) + ansible.builtin.command: git log + args: + warn: false + changed_when: false + + - name: Test latest[hg] (don't warn) + ansible.builtin.command: chmod 644 A + args: + warn: false + creates: B + + - name: Test latest[hg] (warn) + ansible.builtin.command: chmod 644 A + args: + warn: true + creates: B + + - name: Test latest[git] (don't warn single line) + ansible.builtin.command: warn=False chdir=/tmp/blah git log + changed_when: false + + - name: Test latest[hg] (don't warn single line) + ansible.builtin.command: warn=no creates=B chmod 644 A + + - name: Test latest[hg] (warn single line) + ansible.builtin.command: warn=yes creates=B chmod 644 A diff --git a/examples/playbooks/strict-mode.yml b/examples/playbooks/strict-mode.yml new file mode 100644 index 0000000..a69c428 --- /dev/null +++ b/examples/playbooks/strict-mode.yml @@ -0,0 +1,8 @@ +--- +- name: Fixture for test_strict + hosts: localhost + tasks: + - name: Display debug information + ansible.builtin.stat: + path2: echo "Hello World" # <-- args[module] due to invalid use of path2 instead of path + changed_when: false diff --git a/examples/playbooks/syntax-error-string.yml b/examples/playbooks/syntax-error-string.yml new file mode 100644 index 0000000..e158ea8 --- /dev/null +++ b/examples/playbooks/syntax-error-string.yml @@ -0,0 +1,7 @@ +# This file is valid YAML and passed JSON Schema validation but not ansible +# own syntax check. + +- hosts: localhost + tasks: + - name: Invalid syntax + x.y.z.w: {} diff --git a/examples/playbooks/syntax-error.yml b/examples/playbooks/syntax-error.yml new file mode 100644 index 0000000..8657633 --- /dev/null +++ b/examples/playbooks/syntax-error.yml @@ -0,0 +1,6 @@ +--- +- name: This should raise syntax-error + hosts: localhost + tasks: + ansible.builtin.debug: + msg: Note that `tasks` is not entered as a list. 
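The syntax-error fixture above fails because `tasks` is given a mapping instead of a list of tasks. A corrected sketch is shown below; it is illustrative only and not a fixture from the diff.

---
# Illustrative corrected form of syntax-error.yml: `tasks` holds a list,
# so the debug task becomes a list item with its own `name`.
- name: This should no longer raise syntax-error
  hosts: localhost
  tasks:
    - name: Debug task entered as a list item
      ansible.builtin.debug:
        msg: Now `tasks` is entered as a list.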
diff --git a/examples/playbooks/taskimports.yml b/examples/playbooks/taskimports.yml new file mode 100644 index 0000000..4e5b5f1 --- /dev/null +++ b/examples/playbooks/taskimports.yml @@ -0,0 +1,16 @@ +--- +- name: Fixture + hosts: localhost + vars: + varset: tasks/simple_task.yml + tasks: + - name: Import 1 # noqa: fqcn + import_tasks: tasks/nestedincludes.yml + # - import_tasks: "{{ varnotset }}.yml" + - name: Import 2 # noqa: fqcn + import_tasks: "{{ varset }}" + - name: Import 3 # noqa: fqcn + import_tasks: tasks/directory with spaces/main.yml + # Import tasks by FQCN as well to ensure they load + - name: Import 4 + ansible.builtin.import_tasks: tasks/passing_task.yml diff --git a/examples/playbooks/taskincludes.yml b/examples/playbooks/taskincludes.yml new file mode 100644 index 0000000..91f0fc6 --- /dev/null +++ b/examples/playbooks/taskincludes.yml @@ -0,0 +1,11 @@ +--- +- name: Fixture + hosts: localhost + vars: + varset: tasks/simple_task.yml + tasks: + # - include_tasks: "{{ varnotset }}.yml" + - include_tasks: "{{ varset }}.yml" + - include_tasks: tasks/directory with spaces/main.yml + # Include tasks by FQCN as well to ensure they load + - ansible.builtin.include_tasks: tasks/passing_task.yml diff --git a/examples/playbooks/tasks/bug-2875.yml b/examples/playbooks/tasks/bug-2875.yml new file mode 100644 index 0000000..73abe23 --- /dev/null +++ b/examples/playbooks/tasks/bug-2875.yml @@ -0,0 +1,9 @@ +--- +- name: "Reproduce bug #2875" + include_tasks: "{{ lookup('first_found', __ff_params) }}" + vars: + __ff_params: + files: + - "default.yml" + paths: + - "{{ role_path }}/tasks/setup" diff --git a/examples/playbooks/tasks/conflicting-action-statements.yml b/examples/playbooks/tasks/conflicting-action-statements.yml new file mode 100644 index 0000000..3fe4d71 --- /dev/null +++ b/examples/playbooks/tasks/conflicting-action-statements.yml @@ -0,0 +1,3 @@ +--- +- shell: pip + changed: false diff --git a/examples/playbooks/tasks/directory with spaces/main.yml b/examples/playbooks/tasks/directory with spaces/main.yml new file mode 100644 index 0000000..2d43b29 --- /dev/null +++ b/examples/playbooks/tasks/directory with spaces/main.yml @@ -0,0 +1,4 @@ +--- +# this should generate: unnamed-task: All tasks should be named +- ansible.builtin.assert: + fail_msg: tasks in directory with spaces included diff --git a/examples/playbooks/tasks/empty_blocks.yml b/examples/playbooks/tasks/empty_blocks.yml new file mode 100644 index 0000000..2555e35 --- /dev/null +++ b/examples/playbooks/tasks/empty_blocks.yml @@ -0,0 +1,17 @@ +--- +- name: A named block task + block: + - name: an assertion + ansible.builtin.assert: + fail_msg: foo + rescue: # null + always: {} +- block: + - name: Another assertion + ansible.builtin.assert: + fail_msg: bar + rescue: {} + always: + - name: Yet another assertion + ansible.builtin.assert: + fail_msg: baz diff --git a/examples/playbooks/tasks/include-in-block-inner.yml b/examples/playbooks/tasks/include-in-block-inner.yml new file mode 100644 index 0000000..bc6f81b --- /dev/null +++ b/examples/playbooks/tasks/include-in-block-inner.yml @@ -0,0 +1,5 @@ +--- +- name: include-in-block-inner | I am a block + block: + - name: simple_task | Include tasks from inside a block + ansible.builtin.include_tasks: simple_task.yml diff --git a/examples/playbooks/tasks/included-with-lint.yml b/examples/playbooks/tasks/included-with-lint.yml new file mode 100644 index 0000000..0f2bbd2 --- /dev/null +++ b/examples/playbooks/tasks/included-with-lint.yml @@ -0,0 +1,4 @@ +--- +# missing a 
task name +- ansible.builtin.assert: + fail_msg: foo diff --git a/examples/playbooks/tasks/malformed.yml b/examples/playbooks/tasks/malformed.yml new file mode 100644 index 0000000..f0805cd --- /dev/null +++ b/examples/playbooks/tasks/malformed.yml @@ -0,0 +1,3 @@ +--- +# should produce a malformed error from ansible syntax check +- oops this is invalid diff --git a/examples/playbooks/tasks/nestedincludes.yml b/examples/playbooks/tasks/nestedincludes.yml new file mode 100644 index 0000000..6c2631f --- /dev/null +++ b/examples/playbooks/tasks/nestedincludes.yml @@ -0,0 +1,3 @@ +--- +- name: simple_task | One include + include_tasks: tasks/simple_task.yml diff --git a/examples/playbooks/tasks/passing_task.yml b/examples/playbooks/tasks/passing_task.yml new file mode 100644 index 0000000..23a8651 --- /dev/null +++ b/examples/playbooks/tasks/passing_task.yml @@ -0,0 +1,4 @@ +--- +- name: passing_task | Simple task to include which generates no errors + ansible.builtin.assert: + that: true diff --git a/examples/playbooks/tasks/rule-name-prefix-fail.yml b/examples/playbooks/tasks/rule-name-prefix-fail.yml new file mode 100644 index 0000000..680ccd1 --- /dev/null +++ b/examples/playbooks/tasks/rule-name-prefix-fail.yml @@ -0,0 +1,13 @@ +--- +- name: rule-name-prefix-fail | this is not correctly capitalized + ansible.builtin.assert: + that: true +- name: This is missing the prefix + ansible.builtin.assert: + that: true +- name: name | This prefix is incomplete + ansible.builtin.assert: + that: true +- name: rule-name-prefix-fail | This is correctly | named too + ansible.builtin.assert: + that: true diff --git a/examples/playbooks/tasks/simple_task.yml b/examples/playbooks/tasks/simple_task.yml new file mode 100644 index 0000000..aea6938 --- /dev/null +++ b/examples/playbooks/tasks/simple_task.yml @@ -0,0 +1,4 @@ +--- +- name: simple_task | This is named + ansible.builtin.assert: + fail_msg: foo diff --git a/examples/playbooks/tasks/varset.yml b/examples/playbooks/tasks/varset.yml new file mode 100644 index 0000000..f4337eb --- /dev/null +++ b/examples/playbooks/tasks/varset.yml @@ -0,0 +1,4 @@ +--- +- debug: msg="var was set" + +- git: repo=hello.git diff --git a/examples/playbooks/tasks/varunset.yml b/examples/playbooks/tasks/varunset.yml new file mode 100644 index 0000000..a5508f0 --- /dev/null +++ b/examples/playbooks/tasks/varunset.yml @@ -0,0 +1,2 @@ +--- +- debug: msg="var was not set" diff --git a/examples/playbooks/tasks/x.yml b/examples/playbooks/tasks/x.yml new file mode 100644 index 0000000..a98230f --- /dev/null +++ b/examples/playbooks/tasks/x.yml @@ -0,0 +1,7 @@ +--- +- # nothing, checks bug #849 +- name: Test include + action: funny value=clown + args: + key: value +- # a second null task, validates yaml_utils.get_path_to_task diff --git a/examples/playbooks/templates/not-valid.yaml b/examples/playbooks/templates/not-valid.yaml new file mode 100644 index 0000000..5d34f55 --- /dev/null +++ b/examples/playbooks/templates/not-valid.yaml @@ -0,0 +1,2 @@ +# Used to validate that a templated YAML file does not confuse the linter +{% include 'port.j2' %} diff --git a/examples/playbooks/test_import_with_conflicting_action_statements.yml b/examples/playbooks/test_import_with_conflicting_action_statements.yml new file mode 100644 index 0000000..042a8dd --- /dev/null +++ b/examples/playbooks/test_import_with_conflicting_action_statements.yml @@ -0,0 +1,4 @@ +--- +- hosts: all + tasks: + - import_tasks: tasks/conflicting-action-statements.yml diff --git 
a/examples/playbooks/test_import_with_malformed.yml b/examples/playbooks/test_import_with_malformed.yml new file mode 100644 index 0000000..6ce21cd --- /dev/null +++ b/examples/playbooks/test_import_with_malformed.yml @@ -0,0 +1,4 @@ +--- +- hosts: all + tasks: + - import_tasks: tasks/malformed.yml diff --git a/examples/playbooks/test_include_inplace.yml b/examples/playbooks/test_include_inplace.yml new file mode 100644 index 0000000..0f4cf2c --- /dev/null +++ b/examples/playbooks/test_include_inplace.yml @@ -0,0 +1,5 @@ +--- +- name: Test fixture + hosts: all + roles: + - include_in_the_place diff --git a/examples/playbooks/test_include_relative.yml b/examples/playbooks/test_include_relative.yml new file mode 100644 index 0000000..340d949 --- /dev/null +++ b/examples/playbooks/test_include_relative.yml @@ -0,0 +1,6 @@ +--- +- name: Test fixture + hosts: localhost + gather_facts: false + roles: + - include_relative diff --git a/examples/playbooks/test_skip_inside_yaml.yml b/examples/playbooks/test_skip_inside_yaml.yml new file mode 100644 index 0000000..44d6c28 --- /dev/null +++ b/examples/playbooks/test_skip_inside_yaml.yml @@ -0,0 +1,50 @@ +--- +- name: Fixture + hosts: all + tags: + - skip_ansible_lint # should disable error at playbook level + tasks: + - name: Test # <-- 0 latest[hg] + action: ansible.builtin.hg # noqa: fqcn[canonical] + - name: Test latest[hg] (skipped) # noqa: latest[hg] fqcn[canonical] + action: ansible.builtin.hg + + - name: Test latest[git] and partial-become + action: ansible.builtin.git + become_user: alice + - name: Test latest[git] and partial-become (skipped) # noqa: latest[git] partial-become + action: ansible.builtin.git + become_user: alice + + - name: Test YAML # <-- 1 jinja[spacing] + ansible.builtin.get_url: + # noqa: risky-file-permissions + url: http://example.com/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/file.conf # <-- 2 yaml[line-length] + dest: "{{dest_proj_path}}/foo.conf" + - name: Test YAML and jinja[spacing] (skipped) + ansible.builtin.get_url: + # noqa: risky-file-permissions + url: http://example.com/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/really_long_path/file.conf # noqa: yaml[line-length] + dest: "{{dest_proj_path}}/foo.conf" # noqa: jinja[spacing] + + - name: Test deprecated-command-syntax # <-- 3 deprecated-command-syntax + ansible.builtin.command: creates=B chmod 644 A # noqa: no-free-form + - name: Test deprecated-command-syntax # <-- 4 deprecated-command-syntax + ansible.builtin.command: warn=yes creates=B chmod 644 A # noqa: no-free-form + - name: Test deprecated-command-syntax (skipped via no warn) + ansible.builtin.command: warn=no creates=B chmod 644 A # noqa: no-free-form + - name: Test deprecated-command-syntax (skipped via skip_ansible_lint) + ansible.builtin.command: creates=B chmod 644 A # noqa: no-free-form + tags: + - skip_ansible_lint + +- name: Fixture 2 + hosts: localhost + tasks: + - name: Foo + become: true + block: + - name: Bar + become_user: john_doe + ansible.builtin.command: "/etc/test.sh" + changed_when: false diff --git a/examples/playbooks/unicode.transformed.yml b/examples/playbooks/unicode.transformed.yml new file mode 100644 index 0000000..c07e9e7 --- /dev/null +++ b/examples/playbooks/unicode.transformed.yml @@ -0,0 +1,8 @@ +--- +- name: Fixture + hosts: all + tasks: + # cspell:disable-next-line + - name: Some тест + 
ansible.builtin.command: + cmd: uname diff --git a/examples/playbooks/unicode.yml b/examples/playbooks/unicode.yml new file mode 100644 index 0000000..c07e9e7 --- /dev/null +++ b/examples/playbooks/unicode.yml @@ -0,0 +1,8 @@ +--- +- name: Fixture + hosts: all + tasks: + # cspell:disable-next-line + - name: Some тест + ansible.builtin.command: + cmd: uname diff --git a/examples/playbooks/valid.yml b/examples/playbooks/valid.yml new file mode 100644 index 0000000..3c67f66 --- /dev/null +++ b/examples/playbooks/valid.yml @@ -0,0 +1,4 @@ +--- +- name: Minimal test play + hosts: localhost + tasks: [] diff --git a/examples/playbooks/valid_with_alt_extension.yaml b/examples/playbooks/valid_with_alt_extension.yaml new file mode 100644 index 0000000..e3694c8 --- /dev/null +++ b/examples/playbooks/valid_with_alt_extension.yaml @@ -0,0 +1,7 @@ +--- +# Used to validate that we also accept .yaml extension on playbooks +- name: Fixture + hosts: localhost + tasks: + - ansible.builtin.debug: # <-- should notify about missing 'name' + msg: hello! diff --git a/examples/playbooks/vars/empty_vars.transformed.yml b/examples/playbooks/vars/empty_vars.transformed.yml new file mode 100644 index 0000000..a6e18c1 --- /dev/null +++ b/examples/playbooks/vars/empty_vars.transformed.yml @@ -0,0 +1,2 @@ +--- +# Make sure that the Transformer does not mangle comment-only files! diff --git a/examples/playbooks/vars/empty_vars.yml b/examples/playbooks/vars/empty_vars.yml new file mode 100644 index 0000000..a6e18c1 --- /dev/null +++ b/examples/playbooks/vars/empty_vars.yml @@ -0,0 +1,2 @@ +--- +# Make sure that the Transformer does not mangle comment-only files! diff --git a/examples/playbooks/vars/invalid_vars_schema.yml b/examples/playbooks/vars/invalid_vars_schema.yml new file mode 100644 index 0000000..d4142a2 --- /dev/null +++ b/examples/playbooks/vars/invalid_vars_schema.yml @@ -0,0 +1,2 @@ +--- +123: true # invalid as schema expects string key diff --git a/examples/playbooks/vars/jinja-spacing.yml b/examples/playbooks/vars/jinja-spacing.yml new file mode 100644 index 0000000..419c7bc --- /dev/null +++ b/examples/playbooks/vars/jinja-spacing.yml @@ -0,0 +1,41 @@ +--- +# Should raise jinja[spacing] at line [14, 15, 16, 17, 18, 19, 22, 32, 38], at following variables. 
+# ".bad_var_1", ".bad_var_2", ".bad_var_3", ".invalid_multiline_nested_json", ".invalid_nested_json" +good_var_1: "{{ good_format }}" +good_var_2: "Value: {{ good_format }}" +good_var_3: "{{ good_format | filter }}" +good_var_4: "Value: {{ good_format | filter }}" +jinja_escape_1: "{{ '{{' }}" +jinja_escape_2: docker info --format '{{ '{{' }}json .Swarm.LocalNodeState{{ '}}' }}' | tr -d '"' +jinja_whitespace_control: | + {{ good_format }}/ + {{- good_format }} + {{- good_format -}} +bad_var_1: "{{bad_format}}" # <-- 1 +bad_var_2: "Value: {{ bad_format}}" # <-- 2 +bad_var_3: "{{bad_format }}" # <-- 3 +bad_var_4: "{{ bad_format|filter }}" # <-- 4 +bad_var_5: "Value: {{ bad_format |filter }}" # <-- 5 +bad_var_6: "{{ bad_format| filter }}" # <-- 6 +# spell-checker: disable-next-line +non_jinja_var: "data = ${lookup{$local_part}lsearch{/etc/aliases}}" # noqa: jinja[spacing] +json_inside_jinja: "{{ {'test': {'subtest': variable}} }}" +multiline_vars: # Assert that no false positive on multiline exists + cases: + case1: >- + http://example.com/{{ + case1 }} + case2: >- + http://example.com/{{ + case2 }} +valid_nested_json: "{{ {'dummy_2': {'nested_dummy_1': 'value_1', 'nested_dummy_2': value_2}} | combine(dummy_1) }}" +invalid_nested_json: "{{ {'dummy_2': {'nested_dummy_1': 'value_1', 'nested_dummy_2': value_2}} | combine(dummy_1)}}" # <-- 7 + +valid_multiline_nested_json: >- + {{ {'dummy_2': {'nested_dummy_1': value_1, + 'nested_dummy_2': value_2}} | + combine(dummy_1) }} +invalid_multiline_nested_json: >- # ignored multiline expression, for now + {{ {'dummy_2': {'nested_dummy_1': value_1, + 'nested_dummy_2': value_2}} | + combine(dummy_1)}} diff --git a/examples/playbooks/vars/not_decryptable.yml b/examples/playbooks/vars/not_decryptable.yml new file mode 100644 index 0000000..9a19b07 --- /dev/null +++ b/examples/playbooks/vars/not_decryptable.yml @@ -0,0 +1,10 @@ +$ANSIBLE_VAULT;1.1;AES256 +35366433323361393130396530643233373262666636646439303032366431303363316232313738 +3738636130636431623936303932306430316635663136610a353737333966353462333532393631 +36613030643762636138613734313862333165346464626461313361353732336131633137653865 +3862386136386137650a303433643531613337393735633338383163353737656339653134346363 +63613436333937313738633437373566333065663662643664643261313366323236356364316663 +62336264666464323066336365616634626336616537646336656266343562336533343732613539 +61643661303566313664313164623731316236666235656337363632393665353536303730666532 +64666639306361653963363462393966623763626566613831613739333666333665343734333630 +63623730623033346163393834396639383234393637653733396466316132663131 diff --git a/examples/playbooks/vars/other.yml b/examples/playbooks/vars/other.yml new file mode 100644 index 0000000..2771218 --- /dev/null +++ b/examples/playbooks/vars/other.yml @@ -0,0 +1,2 @@ +--- +some_var: some_value_defined_in_vars_other diff --git a/examples/playbooks/vars/rule_jinja_vars.yml b/examples/playbooks/vars/rule_jinja_vars.yml new file mode 100644 index 0000000..da9e6d1 --- /dev/null +++ b/examples/playbooks/vars/rule_jinja_vars.yml @@ -0,0 +1,5 @@ +# this should not trigger any errors because a 'when' inside +# a vars files, does not use implicit jinja. 
+--- +foo: + when: "{{ var }}" diff --git a/examples/playbooks/vars/strings.transformed.yml b/examples/playbooks/vars/strings.transformed.yml new file mode 100644 index 0000000..d4a737a --- /dev/null +++ b/examples/playbooks/vars/strings.transformed.yml @@ -0,0 +1,39 @@ +--- +# Make sure that the Transformer does not mangle strings +# TODO: there is a bug in ruamel.yaml that discards some EOL comments + +single: single # this is a comment +single_with_double: '"single" quoted' # this is a comment + +single_multiline_with_octothorpe: "single over 160 char line to force wrapping. over 160 char line to force wrapping. over 160 char line to force wrapping. over 160\n\ + # this is not a comment" + +double: double # this is a comment +double_with_single: "'double' quoted" # this is a comment + +double_multiline_with_octothorpe: "double over 160 char line to force wrapping. over 160 char line to force wrapping. over 160 char line to force wrapping. over 160\n\ + # this is not a comment" + +# this is a comment +folded_block_scalar_with_octothorpe: > + # this is not a comment + +# this is a comment +folded_chomp_strip_block_scalar_with_octothorpe: >- + # this is not a comment + +# this is a comment +folded_chomp_keep_block_scalar_with_octothorpe: >+ + # this is not a comment + +# this is a comment +literal_block_scalar_with_octothorpe: | # this is a | EOL comment + # this is not a comment + +# this is a comment +literal_chomp_strip_block_scalar_with_octothorpe: |- # this is a | EOL comment + # this is not a comment + +# this is a comment +literal_chomp_keep_block_scalar_with_octothorpe: | # this is a | EOL comment + # this is not a comment diff --git a/examples/playbooks/vars/strings.yml b/examples/playbooks/vars/strings.yml new file mode 100644 index 0000000..6be848e --- /dev/null +++ b/examples/playbooks/vars/strings.yml @@ -0,0 +1,45 @@ +--- +# Make sure that the Transformer does not mangle strings +# TODO: there is a bug in ruamel.yaml that discards some EOL comments + +single: "single" # this is a comment +single_with_double: '"single" quoted' # this is a comment + +single_multiline_with_octothorpe: # this EOL comment gets lost + "single over 160 char line to force wrapping. over 160 char line to force wrapping. over 160 char line to force wrapping. + over 160 + + # this is not a comment" + +double: "double" # this is a comment +double_with_single: "'double' quoted" # this is a comment + +double_multiline_with_octothorpe: # this EOL comment gets lost + "double over 160 char line to force wrapping. over 160 char line to force wrapping. over 160 char line to force wrapping. 
+ over 160 + + # this is not a comment" + +# this is a comment +folded_block_scalar_with_octothorpe: > # > EOL comment gets lost + # this is not a comment + +# this is a comment +folded_chomp_strip_block_scalar_with_octothorpe: >- # > EOL comment gets lost + # this is not a comment + +# this is a comment +folded_chomp_keep_block_scalar_with_octothorpe: >+ # > EOL comment gets lost + # this is not a comment + +# this is a comment +literal_block_scalar_with_octothorpe: | # this is a | EOL comment + # this is not a comment + +# this is a comment +literal_chomp_strip_block_scalar_with_octothorpe: |- # this is a | EOL comment + # this is not a comment + +# this is a comment +literal_chomp_keep_block_scalar_with_octothorpe: |+ # this is a | EOL comment + # this is not a comment diff --git a/examples/playbooks/vars/subfolder/settings.yml b/examples/playbooks/vars/subfolder/settings.yml new file mode 100644 index 0000000..1c84d58 --- /dev/null +++ b/examples/playbooks/vars/subfolder/settings.yml @@ -0,0 +1,2 @@ +--- +some_var: some_value_defined_in_vars_subfolder_settings diff --git a/examples/playbooks/with-skip-tag-id.yml b/examples/playbooks/with-skip-tag-id.yml new file mode 100644 index 0000000..b383617 --- /dev/null +++ b/examples/playbooks/with-skip-tag-id.yml @@ -0,0 +1,7 @@ +--- +- hosts: all + tasks: + - name: Trailing whitespace on this line + ansible.builtin.git: + repo: "{{ archive_services_repo_url }}" + dest: /home/www diff --git a/examples/playbooks/with-umlaut-ä.yml b/examples/playbooks/with-umlaut-ä.yml new file mode 100644 index 0000000..2ee7a83 --- /dev/null +++ b/examples/playbooks/with-umlaut-ä.yml @@ -0,0 +1,5 @@ +--- +- hosts: + - localhost + roles: + - name: node diff --git a/examples/reqs_v1/requirements.yml b/examples/reqs_v1/requirements.yml new file mode 100644 index 0000000..65643ae --- /dev/null +++ b/examples/reqs_v1/requirements.yml @@ -0,0 +1,10 @@ +--- +# v1 requirements test file + +# As Jeff never created releases on hit repo, we are unable to download tar.gz +# archives, so we need to use the slower git cloning instead. 
+# - src: https://github.com/geerlingguy/mysql/archive/refs/tags/4.2.0.tar.gz + +- name: geerlingguy.mysql + src: https://github.com/geerlingguy/ansible-role-mysql + version: 4.2.0 diff --git a/examples/reqs_v2/requirements.yml b/examples/reqs_v2/requirements.yml new file mode 100644 index 0000000..735bf3c --- /dev/null +++ b/examples/reqs_v2/requirements.yml @@ -0,0 +1,8 @@ +--- +roles: + - name: geerlingguy.mysql + src: https://github.com/geerlingguy/ansible-role-mysql + version: 4.2.0 + +collections: + - name: https://galaxy.ansible.com/download/community-molecule-0.1.0.tar.gz diff --git a/examples/roles/ansible-role-foo/tasks/main.yaml b/examples/roles/ansible-role-foo/tasks/main.yaml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/roles/ansible-role-foo/tasks/main.yaml diff --git a/examples/roles/bobbins/tasks/imported_tasks.yml b/examples/roles/bobbins/tasks/imported_tasks.yml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/roles/bobbins/tasks/imported_tasks.yml diff --git a/examples/roles/bobbins/tasks/main.yml b/examples/roles/bobbins/tasks/main.yml new file mode 100644 index 0000000..992c85c --- /dev/null +++ b/examples/roles/bobbins/tasks/main.yml @@ -0,0 +1,7 @@ +--- +- name: Test tasks + action: git a=b c=d + +- name: Import tasks + ansible.builtin.import_tasks: + file: imported_tasks.yml diff --git a/examples/roles/broken_argument_specs/meta/argument_specs.yml b/examples/roles/broken_argument_specs/meta/argument_specs.yml new file mode 100644 index 0000000..ddc9862 --- /dev/null +++ b/examples/roles/broken_argument_specs/meta/argument_specs.yml @@ -0,0 +1,5 @@ +--- +argument_specs: + main: + foo: bar # <-- invalid based on json schema + options: {} diff --git a/examples/roles/dependency_in_meta/meta/main.yml b/examples/roles/dependency_in_meta/meta/main.yml new file mode 100644 index 0000000..ce20d01 --- /dev/null +++ b/examples/roles/dependency_in_meta/meta/main.yml @@ -0,0 +1,42 @@ +--- +# meta file, determined by ending in meta/main.yml +# https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_reuse_roles.html#role-dependencies +allow_duplicates: true +dependencies: + # from Bitbucket + - src: git+http://bitbucket.org/willthames/git-ansible-galaxy + version: v1.4 + + # from Bitbucket, alternative syntax and caveats + - src: http://bitbucket.org/willthames/hg-ansible-galaxy + scm: hg + + # from galaxy + - src: yatesr.timezone + + # from GitHub + - src: https://github.com/bennojoy/nginx + + # from GitHub, overriding the name and specifying a specific tag + - src: https://github.com/bennojoy/nginx + version: master + name: nginx_role + + # from GitLab or other git-based scm + - src: git@gitlab.company.com:my-group/my-repo.git + scm: git + version: "0.1" # quoted, so YAML doesn't parse this as a floating-point value + + # from a webserver, where the role is packaged in a tar.gz + - src: https://some.webserver.example.com/files/master.tar.gz + name: http-role + +galaxy_info: + standalone: true + author: foo + description: Testing meta + company: Not applicable + license: MIT + min_ansible_version: "2.5" + platforms: + - name: Fedora diff --git a/examples/roles/foo.yml b/examples/roles/foo.yml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/roles/foo.yml diff --git a/examples/roles/hello/meta/argument_specs.yml b/examples/roles/hello/meta/argument_specs.yml new file mode 100644 index 0000000..47bde78 --- /dev/null +++ b/examples/roles/hello/meta/argument_specs.yml @@ -0,0 +1,27 @@ +--- +# 
https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#role-argument-validation +argument_specs: + main: + short_description: The main entry point for the role. + description: "a longer description" + options: + my_app_int: + type: "int" + required: false + default: 42 + description: "The integer value, defaulting to 42." + no_log: false + + my_app_str: + type: "str" + required: true + description: "The string value" + + alternate: + short_description: The alternate entry point for the my_app role. + options: + my_app_int: + type: "int" + required: false + default: 1024 + description: "The integer value, defaulting to 1024." diff --git a/examples/roles/hello/meta/main.yml b/examples/roles/hello/meta/main.yml new file mode 100644 index 0000000..b15a998 --- /dev/null +++ b/examples/roles/hello/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - role: bobbins diff --git a/examples/roles/include_in_the_place/tasks/included_file.yml b/examples/roles/include_in_the_place/tasks/included_file.yml new file mode 100644 index 0000000..8dd30a3 --- /dev/null +++ b/examples/roles/include_in_the_place/tasks/included_file.yml @@ -0,0 +1,4 @@ +--- +- name: included_file | Test Fixture + ansible.builtin.debug: + msg: "was found & included" diff --git a/examples/roles/include_in_the_place/tasks/main.yml b/examples/roles/include_in_the_place/tasks/main.yml new file mode 100644 index 0000000..4e0d500 --- /dev/null +++ b/examples/roles/include_in_the_place/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- name: Include a task + ansible.builtin.include_tasks: included_file.yml diff --git a/examples/roles/include_miss/tasks/main.yml b/examples/roles/include_miss/tasks/main.yml new file mode 100644 index 0000000..7df4ff2 --- /dev/null +++ b/examples/roles/include_miss/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- name: Include a missing file + ansible.builtin.include_tasks: tasks/noexist_file.yml diff --git a/examples/roles/include_relative/tasks/included_file.yml b/examples/roles/include_relative/tasks/included_file.yml new file mode 100644 index 0000000..ef81833 --- /dev/null +++ b/examples/roles/include_relative/tasks/included_file.yml @@ -0,0 +1,4 @@ +--- +- name: included_file | Sample debug task + ansible.builtin.debug: + msg: "was found & included" diff --git a/examples/roles/include_relative/tasks/main.yml b/examples/roles/include_relative/tasks/main.yml new file mode 100644 index 0000000..de69255 --- /dev/null +++ b/examples/roles/include_relative/tasks/main.yml @@ -0,0 +1,7 @@ +--- +# ansible allow paths relative to role directory +- name: 1st include + ansible.builtin.include_tasks: tasks/included_file.yml +# relative to the current file, same effect as previous line +- name: 2nd include + ansible.builtin.include_tasks: included_file.yml diff --git a/examples/roles/invalid-name/tasks/main.yaml b/examples/roles/invalid-name/tasks/main.yaml new file mode 100644 index 0000000..228dca3 --- /dev/null +++ b/examples/roles/invalid-name/tasks/main.yaml @@ -0,0 +1,4 @@ +--- +- name: Foo + ansible.builtin.debug: + msg: foo diff --git a/examples/roles/invalid_due_syntax/tasks/main.yml b/examples/roles/invalid_due_syntax/tasks/main.yml new file mode 100644 index 0000000..b029f9c --- /dev/null +++ b/examples/roles/invalid_due_syntax/tasks/main.yml @@ -0,0 +1,2 @@ +--- +- name: Fixture for testing syntax-check[specific] on roles diff --git a/examples/roles/invalid_due_to_meta/meta/main.yml b/examples/roles/invalid_due_to_meta/meta/main.yml new file mode 100644 index 0000000..c389d74 --- /dev/null +++ 
b/examples/roles/invalid_due_to_meta/meta/main.yml @@ -0,0 +1,10 @@ +--- +galaxy_info: + standalone: true + role_name: invalid-due-to-meta # <-- invalid role name + author: foo + description: foo + license: MIT + platforms: + - name: AIX + min_ansible_version: "2.7" diff --git a/examples/roles/invalid_due_to_meta/tasks/main.yaml b/examples/roles/invalid_due_to_meta/tasks/main.yaml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/roles/invalid_due_to_meta/tasks/main.yaml diff --git a/examples/roles/invalid_meta_schema/meta/main.yml b/examples/roles/invalid_meta_schema/meta/main.yml new file mode 100644 index 0000000..c8ea8e2 --- /dev/null +++ b/examples/roles/invalid_meta_schema/meta/main.yml @@ -0,0 +1,9 @@ +--- +galaxy_info: + standalone: true + author: foo + description: false # <-- schema fail as string is expected + license: XXX + platforms: + - name: AIX + min_ansible_version: "2.7" diff --git a/examples/roles/invalid_requirements_schema/meta/requirements.yml b/examples/roles/invalid_requirements_schema/meta/requirements.yml new file mode 100644 index 0000000..41a70af --- /dev/null +++ b/examples/roles/invalid_requirements_schema/meta/requirements.yml @@ -0,0 +1,3 @@ +--- +# this should fail validation +foo: bar diff --git a/examples/roles/invalud_meta_schema b/examples/roles/invalud_meta_schema new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/roles/invalud_meta_schema diff --git a/examples/roles/loop_var_prefix/tasks/fail.yml b/examples/roles/loop_var_prefix/tasks/fail.yml new file mode 100644 index 0000000..311cf27 --- /dev/null +++ b/examples/roles/loop_var_prefix/tasks/fail.yml @@ -0,0 +1,38 @@ +--- +# 5 expected loop-var-prefix failures at 3, 9, 19, 26, 33 +- name: fail | That should trigger loop-var-prefix + ansible.builtin.debug: + var: item + loop: + - foo + - bar +- name: fail | That should fail due to wrong prefix + ansible.builtin.debug: + var: zz_item + loop: + - foo + - bar + loop_control: + loop_var: zz_item +- name: fail | Using a block + block: + - name: fail | That should also not pass + ansible.builtin.debug: + var: item + loop: + - apples + - oranges + rescue: + - name: fail | That should also not pass + ansible.builtin.debug: + var: item + loop: + - avocados + - kiwis + always: + - name: fail | That should also not pass + ansible.builtin.debug: + var: item + loop: + - bananas + - muscats diff --git a/examples/roles/loop_var_prefix/tasks/pass.yml b/examples/roles/loop_var_prefix/tasks/pass.yml new file mode 100644 index 0000000..328c0a7 --- /dev/null +++ b/examples/roles/loop_var_prefix/tasks/pass.yml @@ -0,0 +1,30 @@ +--- +# 0 expected loop-var-prefix failures +- name: pass | That should pass + ansible.builtin.debug: + var: loop_var_prefix_item + loop: + - foo + - bar + loop_control: + loop_var: loop_var_prefix_item +- name: pass | Using a block + block: + - name: pass | That should also pass + ansible.builtin.debug: + var: loop_var_prefix_item + loop: + - foo + - bar + loop_control: + loop_var: loop_var_prefix_item +- name: pass | Using alternative double underline prefix + block: + - name: pass | That should also pass + ansible.builtin.debug: + var: __some_item + loop: + - foo + - bar + loop_control: + loop_var: __some_item diff --git a/examples/roles/meta_noqa/meta/main.yml b/examples/roles/meta_noqa/meta/main.yml new file mode 100644 index 0000000..62ccab3 --- /dev/null +++ b/examples/roles/meta_noqa/meta/main.yml @@ -0,0 +1,8 @@ +--- +galaxy_info: # noqa: meta-incorrect + standalone: true + author: your-name # 
noqa: meta-no-info + description: missing min_ansible_version and platforms. author default not changed + license: MIT + min_ansible_version: "2.10" + platforms: [] diff --git a/examples/roles/more_complex/handlers/main.yml b/examples/roles/more_complex/handlers/main.yml new file mode 100644 index 0000000..1476749 --- /dev/null +++ b/examples/roles/more_complex/handlers/main.yml @@ -0,0 +1,3 @@ +--- +- name: Restart service using command + command: service bar restart diff --git a/examples/roles/more_complex/tasks/main.yml b/examples/roles/more_complex/tasks/main.yml new file mode 100644 index 0000000..62880a6 --- /dev/null +++ b/examples/roles/more_complex/tasks/main.yml @@ -0,0 +1,9 @@ +--- +- name: Test bad command + action: command mkdir blah + +- name: Test bad command v2 + command: mkdir blah + +- name: Test bad local command + local_action: shell touch foo diff --git a/examples/roles/role_for_no_same_owner/tasks/fail.yml b/examples/roles/role_for_no_same_owner/tasks/fail.yml new file mode 100644 index 0000000..b1e01b0 --- /dev/null +++ b/examples/roles/role_for_no_same_owner/tasks/fail.yml @@ -0,0 +1,71 @@ +--- +- name: fail | Block + block: + - name: fail | Synchronize-in-block + ansible.posix.synchronize: + src: dummy + dest: dummy + +- name: fail | Synchronize + ansible.posix.synchronize: + src: dummy + dest: dummy + +- name: fail | Nested-block + block: + - name: fail | Synchronize + block: + - name: fail | Synchronize-in-deep-block + ansible.posix.synchronize: + src: dummy + dest: dummy + rescue: + - name: fail | Synchronize-in-rescue + ansible.posix.synchronize: + src: dummy + dest: dummy + always: + - name: fail | Synchronize-in-always + ansible.posix.synchronize: + src: dummy + dest: dummy + +- name: fail | Unarchive-bz2 + ansible.builtin.unarchive: + src: "{{ file }}.tar.bz2" + dest: dummy + +- name: fail | Unarchive delegated + ansible.builtin.unarchive: + src: "{{ file }}.tar.bz2" + dest: dummy + delegate_to: localhost + +- name: fail | Unarchive-gz + ansible.builtin.unarchive: + src: "{{ file }}.tar.gz" + dest: dummy + +- name: fail | Unarchive-tar + ansible.builtin.unarchive: + src: "{{ file }}.tar" + dest: dummy + +- name: fail | Unarchive-xz + ansible.builtin.unarchive: + src: "{{ file }}.tar.xz" + dest: dummy + +- name: fail | Unarchive-zip + ansible.builtin.unarchive: + src: "{{ file }}.zip" + dest: dummy + extra_opts: + - -X + +- name: fail | Unarchive-zip-same-owner + ansible.builtin.unarchive: + src: "{{ file }}.zip" + dest: dummy + extra_opts: + - -X diff --git a/examples/roles/role_for_no_same_owner/tasks/pass.yml b/examples/roles/role_for_no_same_owner/tasks/pass.yml new file mode 100644 index 0000000..3af914b --- /dev/null +++ b/examples/roles/role_for_no_same_owner/tasks/pass.yml @@ -0,0 +1,32 @@ +--- +- name: pass | Synchronize-delegate + ansible.posix.synchronize: + src: dummy + dest: dummy + delegate_to: localhost + +- name: pass | Synchronize-no-same-owner + ansible.posix.synchronize: + src: dummy + dest: dummy + owner: false + group: false + +- name: pass | Unarchive-no-same-owner + ansible.builtin.unarchive: + src: "{{ file }}.tar.gz" + dest: dummy + extra_opts: + - --no-same-owner + +- name: pass | Unarchive-remote-src + ansible.builtin.unarchive: + src: "{{ file }}.tar.gz" + dest: dummy + extra_opts: + - --no-same-owner + +- name: pass | Unarchive-unknown-file-ending + ansible.builtin.unarchive: + src: "{{ file }}" + dest: dummy diff --git a/examples/roles/role_with_task_inclusions/tasks/imported_tasks.yml 
b/examples/roles/role_with_task_inclusions/tasks/imported_tasks.yml new file mode 100644 index 0000000..601b164 --- /dev/null +++ b/examples/roles/role_with_task_inclusions/tasks/imported_tasks.yml @@ -0,0 +1,3 @@ +--- +# this task is missing a name (unnamed-task) +- ansible.builtin.ping: diff --git a/examples/roles/role_with_task_inclusions/tasks/included_tasks.yml b/examples/roles/role_with_task_inclusions/tasks/included_tasks.yml new file mode 100644 index 0000000..601b164 --- /dev/null +++ b/examples/roles/role_with_task_inclusions/tasks/included_tasks.yml @@ -0,0 +1,3 @@ +--- +# this task is missing a name (unnamed-task) +- ansible.builtin.ping: diff --git a/examples/roles/role_with_task_inclusions/tasks/main.yml b/examples/roles/role_with_task_inclusions/tasks/main.yml new file mode 100644 index 0000000..46bc212 --- /dev/null +++ b/examples/roles/role_with_task_inclusions/tasks/main.yml @@ -0,0 +1,10 @@ +--- +- name: Include 1 + ansible.builtin.include_tasks: included_tasks.yml +- name: Include 2 + ansible.builtin.import_tasks: imported_tasks.yml +- name: Include 3 + ansible.builtin.include_tasks: + file: included_tasks.yml + apply: + tags: some_tag diff --git a/examples/roles/subfolder/1st_role/tasks/main.yml b/examples/roles/subfolder/1st_role/tasks/main.yml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/roles/subfolder/1st_role/tasks/main.yml diff --git a/examples/roles/subfolder/2nd_role/tasks/main.yml b/examples/roles/subfolder/2nd_role/tasks/main.yml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/roles/subfolder/2nd_role/tasks/main.yml diff --git a/examples/roles/subfolder/3rd_role/tasks/main.yml b/examples/roles/subfolder/3rd_role/tasks/main.yml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/roles/subfolder/3rd_role/tasks/main.yml diff --git a/examples/roles/subfolder/other_role/tasks/main.yml b/examples/roles/subfolder/other_role/tasks/main.yml new file mode 100644 index 0000000..b4d7f48 --- /dev/null +++ b/examples/roles/subfolder/other_role/tasks/main.yml @@ -0,0 +1,4 @@ +--- +- name: Foo + debug: + msg: "Hello!" diff --git a/examples/roles/template_lookup/files/a_file b/examples/roles/template_lookup/files/a_file new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/roles/template_lookup/files/a_file diff --git a/examples/roles/template_lookup/tasks/main.yml b/examples/roles/template_lookup/tasks/main.yml new file mode 100644 index 0000000..db2e490 --- /dev/null +++ b/examples/roles/template_lookup/tasks/main.yml @@ -0,0 +1,10 @@ +--- +# Place tasks in a block as templates are not rendered for top-level tasks +# in roles. Specifically, in `play_children()` of src/ansiblelint/utils.py, +# tasks in blocks go through `delegate_map['block']`, while top-level tasks +# in a role is not handled by `delegate_map`. +# Ref: https://github.com/ansible/ansible-lint/blob/v5.0.12/src/ansiblelint/utils.py#L305 +- block: + - name: Bug demo + ansible.builtin.debug: + msg: '{{ lookup("file", "a_file") }}' diff --git a/examples/roles/template_lookup_missing/tasks/main.yml b/examples/roles/template_lookup_missing/tasks/main.yml new file mode 100644 index 0000000..fc0542c --- /dev/null +++ b/examples/roles/template_lookup_missing/tasks/main.yml @@ -0,0 +1,7 @@ +--- +# See comments in `examples/roles/template_lookup/tasks/main.yml` +# for why the task is in a block. 
+- block: + - name: Bug demo + ansible.builtin.debug: + msg: '{{ lookup("file", "a_file") }}' diff --git a/examples/roles/test-role/meta/requirements.yml b/examples/roles/test-role/meta/requirements.yml new file mode 100644 index 0000000..8e7fccc --- /dev/null +++ b/examples/roles/test-role/meta/requirements.yml @@ -0,0 +1,3 @@ +--- +roles: [] +collections: [] diff --git a/examples/roles/test-role/molecule/default/include-import-role.yml b/examples/roles/test-role/molecule/default/include-import-role.yml new file mode 100644 index 0000000..6d56c42 --- /dev/null +++ b/examples/roles/test-role/molecule/default/include-import-role.yml @@ -0,0 +1,6 @@ +--- +- name: test + gather_facts: false + hosts: all + roles: + - role: test-role diff --git a/examples/roles/test-role/tasks/main.yml b/examples/roles/test-role/tasks/main.yml new file mode 100644 index 0000000..8e5402f --- /dev/null +++ b/examples/roles/test-role/tasks/main.yml @@ -0,0 +1,4 @@ +--- +- name: Shell instead of command + ansible.builtin.shell: + cmd: echo hello world diff --git a/examples/roles/test-role/tasks/world.yml b/examples/roles/test-role/tasks/world.yml new file mode 100644 index 0000000..3f3fd3e --- /dev/null +++ b/examples/roles/test-role/tasks/world.yml @@ -0,0 +1,3 @@ +--- +- command: # noqa: fqcn + cmd: echo this is a task without a name # noqa: no-free-form diff --git a/examples/roles/valid-due-to-meta/meta/main.yml b/examples/roles/valid-due-to-meta/meta/main.yml new file mode 100644 index 0000000..32fc400 --- /dev/null +++ b/examples/roles/valid-due-to-meta/meta/main.yml @@ -0,0 +1,10 @@ +--- +galaxy_info: + standalone: true + role_name: valid_due_to_meta + author: foo + description: foo + license: MIT + platforms: + - name: Fedora + min_ansible_version: "2.7" diff --git a/examples/roles/valid-due-to-meta/tasks/debian/main.yml b/examples/roles/valid-due-to-meta/tasks/debian/main.yml new file mode 100644 index 0000000..6fa48c2 --- /dev/null +++ b/examples/roles/valid-due-to-meta/tasks/debian/main.yml @@ -0,0 +1,2 @@ +# This empty task file is here to test that roles with tasks organized in subdirectories +# are handled correctly by ansible-lint. 
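
The `loop_var_prefix` fixtures a little further up (`tasks/fail.yml` versus `tasks/pass.yml`) are easier to read with the convention spelled out: loop variables are expected to carry either the role name or a double underscore as a prefix, which is why the default `item` and `zz_item` are flagged while `loop_var_prefix_item` and `__some_item` pass. Below is a minimal Python sketch of that check; the pattern is an assumption modeled on the fixtures, not a copy of ansible-lint's implementation.

```python
# Hypothetical approximation of the loop-var-prefix convention the fixtures exercise.
import re


def loop_var_ok(loop_var: str, role: str) -> bool:
    """Accept loop variables prefixed with "__" or with "<role>_"."""
    return re.match(rf"^(__|{re.escape(role)}_)", loop_var) is not None


print(loop_var_ok("item", "loop_var_prefix"))                  # False -> flagged (default loop var)
print(loop_var_ok("zz_item", "loop_var_prefix"))               # False -> flagged (wrong prefix)
print(loop_var_ok("loop_var_prefix_item", "loop_var_prefix"))  # True
print(loop_var_ok("__some_item", "loop_var_prefix"))           # True
```

Similarly, the `invalid_due_to_meta` and `valid-due-to-meta` fixtures hinge on the effective role name: `galaxy_info.role_name` in `meta/main.yml` overrides the directory name, so only the role whose override still contains dashes trips the role-name check. A rough sketch of that naming constraint follows; the exact regular expression is an assumption (lowercase start, then lowercase letters, digits, or underscores).

```python
# Hypothetical approximation of the role-name constraint, using the two fixture names above.
import re

ROLE_NAME_PATTERN = re.compile(r"^[a-z][a-z0-9_]*$")

print(bool(ROLE_NAME_PATTERN.match("invalid-due-to-meta")))  # False -> role-name violation
print(bool(ROLE_NAME_PATTERN.match("valid_due_to_meta")))    # True  -> accepted
```
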
diff --git a/examples/roles/valid-due-to-meta/tasks/main.yaml b/examples/roles/valid-due-to-meta/tasks/main.yaml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/roles/valid-due-to-meta/tasks/main.yaml diff --git a/examples/roles/var_naming_pattern/.ansible-lint b/examples/roles/var_naming_pattern/.ansible-lint new file mode 100644 index 0000000..d6dc161 --- /dev/null +++ b/examples/roles/var_naming_pattern/.ansible-lint @@ -0,0 +1,2 @@ +--- +var_naming_pattern: "^[a-z][a-z0-9_]*[a-z0-9]__[a-z][a-z0-9_]*[a-z0-9]$" diff --git a/examples/roles/var_naming_pattern/tasks/main.yml b/examples/roles/var_naming_pattern/tasks/main.yml new file mode 100644 index 0000000..4964f17 --- /dev/null +++ b/examples/roles/var_naming_pattern/tasks/main.yml @@ -0,0 +1,4 @@ +--- +- name: Foobar + ansible.builtin.set_fact: + k8s_grafana_users__namespace: "foo" diff --git a/examples/rulebooks/rulebook.yml b/examples/rulebooks/rulebook.yml new file mode 100644 index 0000000..1f5d444 --- /dev/null +++ b/examples/rulebooks/rulebook.yml @@ -0,0 +1,19 @@ +--- +- name: Demo rules with kafka as source + hosts: localhost + sources: + - name: kafka + kafka: + topic: eda + host: localhost + port: 9092 + group_id: testing + rules: + - name: + condition: event.i is defined + action: + debug: + - name: + condition: event.stop == true + action: + shutdown: diff --git a/examples/rules/task_has_tag.py b/examples/rules/task_has_tag.py new file mode 100644 index 0000000..e9c6a3e --- /dev/null +++ b/examples/rules/task_has_tag.py @@ -0,0 +1,41 @@ +"""Example implementation of a rule requiring tasks to have tags set.""" +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class TaskHasTag(AnsibleLintRule): + """Tasks must have tag.""" + + id = "EXAMPLE001" + description = "Tasks must have tag" + tags = ["productivity", "tags"] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + """Task matching method.""" + if isinstance(task, str): + return False + + # If the task include another task or make the playbook fail + # Don't force to have a tag + if not set(task.keys()).isdisjoint(["include", "fail"]): + return False + + if not set(task.keys()).isdisjoint(["include_tasks", "fail"]): + return False + + if not set(task.keys()).isdisjoint(["import_tasks", "fail"]): + return False + + # Task should have tags + if "tags" not in task: + return True + + return False diff --git a/examples/templates/playbooks/playbook.yml b/examples/templates/playbooks/playbook.yml new file mode 100644 index 0000000..bf85245 --- /dev/null +++ b/examples/templates/playbooks/playbook.yml @@ -0,0 +1,6 @@ +--- +# even if is hosted under playbooks and is named playbook.yml, this file +# is not a real playbook because it is hosted in a "templates" directory +# and that means we will avoid processing it as a playbook. Templates use +# either jinja2 or another templating engine, so we cannot load them. 
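
The role-level `.ansible-lint` under `examples/roles/var_naming_pattern/` a little further up overrides `var_naming_pattern` to require two lowercase segments joined by a double underscore, which is exactly the shape of the `k8s_grafana_users__namespace` fact that fixture sets. A short Python check of that pattern; the regex and the passing name are copied from those files, while the failing names are invented for contrast.

```python
# The var_naming_pattern from examples/roles/var_naming_pattern/.ansible-lint.
import re

pattern = re.compile(r"^[a-z][a-z0-9_]*[a-z0-9]__[a-z][a-z0-9_]*[a-z0-9]$")

for name in ("k8s_grafana_users__namespace", "k8s_grafana_users_namespace", "Foo__bar"):
    print(f"{name} -> {bool(pattern.match(name))}")
# k8s_grafana_users__namespace -> True   (two segments joined by "__")
# k8s_grafana_users_namespace  -> False  (no "__" separator)
# Foo__bar                     -> False  (must start with a lowercase letter)
```

The custom rule in `examples/rules/task_has_tag.py` above is also worth unpacking: it flags any task that neither includes other task files (nor deliberately fails) nor declares `tags`. Here is a self-contained sketch of just that matching logic, kept apart from the `AnsibleLintRule` plumbing; the helper name and the sample task dictionaries are hypothetical.

```python
# Stand-alone restatement of the tag check performed by TaskHasTag.matchtask above.
from __future__ import annotations

from typing import Any

SKIP_KEYS = ("include", "include_tasks", "import_tasks", "fail")


def task_needs_tag(task: dict[str, Any]) -> bool:
    """Return True when a task should be flagged for having no tags."""
    if not set(task).isdisjoint(SKIP_KEYS):
        # Tasks that include other files (or intentionally fail) are exempt.
        return False
    return "tags" not in task


print(task_needs_tag({"name": "Install git", "ansible.builtin.package": {"name": "git"}}))  # True
print(task_needs_tag({"name": "Install git", "tags": ["packages"], "ansible.builtin.package": {"name": "git"}}))  # False
print(task_needs_tag({"name": "Reuse tasks", "include_tasks": "other.yml"}))  # False, includes are exempt
```
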
+foo: bar diff --git a/examples/testproject/roles/test-role/tasks/main.yml b/examples/testproject/roles/test-role/tasks/main.yml new file mode 100644 index 0000000..fda441f --- /dev/null +++ b/examples/testproject/roles/test-role/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- name: Shell instead of command + shell: echo hello world diff --git a/examples/yamllint/invalid.yml b/examples/yamllint/invalid.yml new file mode 100644 index 0000000..44e4072 --- /dev/null +++ b/examples/yamllint/invalid.yml @@ -0,0 +1,9 @@ +# missing document-start +foo: ... +foo: ... # <-- key-duplicates +bar: ... # <-- wrong comment indentation + +# next line has trailing-spaces: +other: aaa + +# ^ empty-lines diff --git a/examples/yamllint/multi-document.yaml b/examples/yamllint/multi-document.yaml new file mode 100644 index 0000000..e3d8805 --- /dev/null +++ b/examples/yamllint/multi-document.yaml @@ -0,0 +1,4 @@ +--- +foo: ... +--- +bar: ... diff --git a/examples/yamllint/valid.yml b/examples/yamllint/valid.yml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/examples/yamllint/valid.yml diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 0000000..817e879 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,197 @@ +--- +site_name: Ansible Lint Documentation +site_url: https://ansible-lint.readthedocs.io/ +repo_url: https://github.com/ansible/ansible-lint +edit_uri: blob/main/docs/ +copyright: Copyright © 2023 Red Hat, Inc. +docs_dir: docs +strict: true + +extra_css: + - stylesheets/extra.css + +theme: + name: "material" + logo: images/logo.svg + favicon: images/favicon.ico + features: + - content.code.copy + - content.action.edit + - navigation.expand + - navigation.sections + - navigation.instant + - navigation.indexes + - navigation.tracking + - toc.integrate + palette: + - media: "(prefers-color-scheme: light)" + primary: teal + accent: blue + scheme: default + toggle: + icon: material/brightness-7 + name: Switch to dark mode + - media: "(prefers-color-scheme: dark)" + scheme: slate + primary: teal + accent: blue + toggle: + icon: material/brightness-4 + name: Switch to light mode +extra: + social: + - icon: fontawesome/brands/python + link: https://pypi.org/project/ansible-lint/ + name: PyPI + - icon: fontawesome/solid/scroll + link: https://github.com/ansible/ansible-lint/releases + name: Releases + - icon: simple/mastodon + link: https://fosstodon.org/@ansible + name: Mastodon + - icon: fontawesome/brands/twitter + link: https://twitter.com/ansible + name: Twitter + - icon: simple/matrix + link: https://matrix.to/#/#devtools:ansible.com + name: Matrix + - icon: fontawesome/solid/comments + link: https://github.com/ansible/ansible-lint/discussions + name: Discussions + - icon: fontawesome/brands/github-alt + link: https://github.com/ansible/ansible-lint + name: GitHub + +nav: + - User Guide: + - home: index.md + - philosophy.md + - installing.md + - usage.md + - configuring.md + - profiles.md + - Rules: + - index: rules/index.md + - rules/args.md + - rules/avoid-implicit.md + - rules/command-instead-of-module.md + - rules/command-instead-of-shell.md + - rules/deprecated-bare-vars.md + - rules/deprecated-command-syntax.md + - rules/deprecated-local-action.md + - rules/deprecated-module.md + - rules/empty-string-compare.md + - rules/fqcn.md + - rules/galaxy.md + - rules/ignore-errors.md + - rules/inline-env-var.md + - rules/internal-error.md + - rules/jinja.md + - rules/key-order.md + - rules/latest.md + - rules/literal-compare.md + - rules/load-failure.md + - rules/loop-var-prefix.md + - 
rules/meta-incorrect.md + - rules/meta-no-info.md + - rules/meta-no-tags.md + - rules/meta-runtime.md + - rules/meta-video-links.md + - rules/name.md + - rules/no-changed-when.md + - rules/no-free-form.md + - rules/no-handler.md + - rules/no-jinja-when.md + - rules/no-log-password.md + - rules/no-prompting.md + - rules/no-relative-paths.md + - rules/no-same-owner.md + - rules/no-tabs.md + - rules/only-builtins.md + - rules/package-latest.md + - rules/parser-error.md + - rules/partial-become.md + - rules/playbook-extension.md + - rules/risky-file-permissions.md + - rules/risky-octal.md + - rules/risky-shell-pipe.md + - rules/role-name.md + - rules/run-once.md + - rules/schema.md + - rules/syntax-check.md + - rules/var-naming.md + - rules/warning.md + - rules/yaml.md + - Developer Guide: + - Contributing: contributing.md + - custom-rules.md + +# - installation.md +# - using: +# - getting-started.md +# - usage.md +# - configuration.md +# - examples.md +# - faq.md +# - contributing.md +# - tags.md +# # - subprocess-tee: '!import https://github.com/pycontribs/subprocess-tee?branch=main&multi_docs=False' +# # - xxx: '!import https://github.com/eclipse-opensmartclide/smartclide-docs' + +plugins: + - autorefs + - markdown-exec + - gen-files: + scripts: + - src/ansiblelint/generate_docs.py + - search + - social + - tags + # https://github.com/manuzhang/mkdocs-htmlproofer-plugin + # - htmlproofer + - mkdocstrings: + handlers: + python: + paths: [src] + options: + # Sphinx is for historical reasons, but we could consider switching if needed + # https://mkdocstrings.github.io/griffe/docstrings/ + docstring_style: sphinx + merge_init_into_class: yes + show_submodules: yes + import: + - url: https://docs.ansible.com/ansible/latest/objects.inv + domains: [py, std] + +markdown_extensions: + - admonition + - def_list + - footnotes + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.inlinehilite + - pymdownx.snippets: + check_paths: true + - pymdownx.superfences + - pymdownx.magiclink: + repo_url_shortener: true + repo_url_shorthand: true + social_url_shorthand: true + social_url_shortener: true + user: facelessuser + repo: pymdown-extensions + normalize_issue_symbols: true + - pymdownx.tabbed: + alternate_style: true + - toc: + toc_depth: 2 + permalink: true + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - name: python + class: python + validator: !!python/name:markdown_exec.validator + format: !!python/name:markdown_exec.formatter diff --git a/playbook.yml b/playbook.yml new file mode 100644 index 0000000..f55677e --- /dev/null +++ b/playbook.yml @@ -0,0 +1,7 @@ +--- +- name: Example + hosts: localhost + tasks: + - name: include extra tasks + ansible.builtin.include_tasks: + file: /dev/null diff --git a/plugins/modules/fake_module.py b/plugins/modules/fake_module.py new file mode 100644 index 0000000..141362e --- /dev/null +++ b/plugins/modules/fake_module.py @@ -0,0 +1,16 @@ +"""Sample custom ansible module named fake_module. + +This is used to test ability to detect and use custom modules. 
+""" +from ansible.module_utils.basic import AnsibleModule + + +def main() -> None: + """Return the module instance.""" + return AnsibleModule( + argument_spec={ + "data": {"default": None}, + "path": {"default": None}, + "file": {"default": None}, + } + ) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..17a69d2 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,210 @@ +[build-system] +requires = [ + "setuptools >= 63.0.0", # required by pyproject+setuptools_scm integration + "setuptools_scm[toml] >= 7.0.5", # required for "no-local-version" scheme + +] +build-backend = "setuptools.build_meta" + +[project] +# https://peps.python.org/pep-0621/#readme +requires-python = ">=3.8" +dynamic = ["version", "dependencies", "optional-dependencies"] +name = "ansible-lint" +description = "Checks playbooks for practices and behavior that could potentially be improved" +readme = "README.md" +authors = [{ "name" = "Will Thames", "email" = "will@thames.id.au" }] +maintainers = [{ "name" = "Ansible by Red Hat", "email" = "info@ansible.com" }] +license = { text = "GPLv3+" } +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Environment :: Console", + "Intended Audience :: Developers", + "Intended Audience :: Information Technology", + "Intended Audience :: System Administrators", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS", + "Operating System :: POSIX", + "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python", + "Topic :: System :: Systems Administration", + "Topic :: Software Development :: Quality Assurance", + "Topic :: Software Development :: Testing", + "Topic :: Utilities", +] +keywords = ["ansible", "lint"] + +[project.urls] +homepage = "https://github.com/ansible/ansible-lint" +documentation = "https://ansible-lint.readthedocs.io/" +repository = "https://github.com/ansible/ansible-lint" +changelog = "https://github.com/ansible/ansible-lint/releases" + +[project.scripts] +ansible-lint = "ansiblelint.__main__:_run_cli_entrypoint" + +[tool.black] +target-version = ["py38"] + +[tool.codespell] +skip = ".tox,.mypy_cache,build,.git,.eggs,pip-wheel-metadata" +# indention is a typo in ruamel.yaml's API +ignore-words-list = "indention" + +[tool.coverage.run] +source = ["src"] +# Do not use branch until bug is fixes: +# https://github.com/nedbat/coveragepy/issues/605 +# branch = true +parallel = true +concurrency = ["multiprocessing", "thread"] + +# Keep this default because xml/report do not know to use load it from config file: +# data_file = ".coverage" +[tool.coverage.paths] +source = ["src", ".tox/*/site-packages"] + +[tool.coverage.report] +exclude_lines = ["pragma: no cover", "if TYPE_CHECKING:"] +omit = ["*/src/*/_vendor/*", "test/*"] +# Increase it just so it would pass on any single-python run +fail_under = 93 +skip_covered = true +skip_empty = true +# During development we might remove code (files) with coverage data, and we dont want to fail: +ignore_errors = true +show_missing = true + +[tool.isort] +profile = "black" +# add_imports = "from __future__ import annotations" +known_first_party = "ansiblelint" +known_third_party = "ansible,pytest,ruamel,setuptools,yaml" +# 
https://black.readthedocs.io/en/stable/the_black_code_style.html#line-length +multi_line_output = 3 +include_trailing_comma = true +force_grid_wrap = 0 +use_parentheses = true +ensure_newline_before_comments = true +line_length = 88 + +[tool.mypy] +python_version = 3.9 +color_output = true +error_summary = true +disallow_untyped_calls = true +disallow_untyped_defs = true +disallow_any_generics = true +# disallow_any_unimported = True +# warn_redundant_casts = True +# warn_return_any = True +# warn_unused_configs = True +# site-packages is here to help vscode mypy integration getting confused +exclude = "(build|dist|test/local-content|site-packages|~/.pyenv)" + +[[tool.mypy.overrides]] +module = ["ansible.*", "yamllint.*", "ansiblelint._version"] +ignore_missing_imports = true +ignore_errors = true + +[tool.pylint.MAIN] +extension-pkg-allow-list = ["black.parsing"] + +[tool.pylint.IMPORTS] +preferred-modules = ["py:pathlib", "unittest:pytest"] + +[tool.pylint.MASTER] +# pylint defaults + f,fh,v,id +good-names = ["i", "j", "k", "ex", "Run", "_", "f", "fh", "v", "id", "T"] +# Ignore as being generated: +ignore-paths = "^src/ansiblelint/(_version|_vendor).*$" + +[tool.pylint."MESSAGES CONTROL"] +# increase from default is 50 which is too aggressive +max-statements = 60 +disable = [ + # On purpose disabled as we rely on black + "line-too-long", + # TODO(ssbarnea): remove temporary skips adding during initial adoption: + "duplicate-code", + # unable to disable it inside tests + # https://github.com/PyCQA/pylint/issues/850 + "cyclic-import", +] + +[tool.pylint.TYPECHECK] +# pylint is unable to detect Namespace attributes and will throw a E1101 +generated-members = "options.*" + +[tool.pylint.SUMMARY] +# We don't need the score spamming console, as we either pass or fail +score = "n" + +[tool.pyright] +# https://github.com/microsoft/pyright/blob/main/docs/configuration.md#sample-pyprojecttoml-file +pythonVersion = "3.9" +include = ["src"] +# https://github.com/microsoft/pyright/issues/777 +"stubPath" = "" + +# spell-checker:ignore filterwarnings norecursedirs optionflags +[tool.pytest.ini_options] +# do not add options here as this will likely break either console runs or IDE +# integration like vscode or pycharm +addopts = "-p no:pytest_cov" +# https://code.visualstudio.com/docs/python/testing +# coverage is re-enabled in `tox.ini`. That approach is safer than +# `--no-cov` which prevents activation from tox.ini and which also fails +# when plugin is effectively missing. +doctest_optionflags = ["ALLOW_UNICODE", "ELLIPSIS"] +filterwarnings = ["error"] +junit_duration_report = "call" +# Our github annotation parser from .github/workflows/tox.yml requires xunit1 format. 
Ref: +# https://github.com/shyim/junit-report-annotations-action/issues/3#issuecomment-663241378 +junit_family = "xunit1" +junit_suite_name = "ansible_lint_test_suite" +minversion = "4.6.6" +norecursedirs = [ + "build", + "collections", + "dist", + "docs", + "src/ansiblelint/_vendor", + "src/ansible_lint.egg-info", + ".cache", + ".eggs", + ".git", + ".github", + ".tox", + "*.egg", + ".projects", +] +python_files = [ + "test_*.py", + # Ref: https://docs.pytest.org/en/latest/reference.html#confval-python_files + # Needed to discover legacy nose test modules: + "Test*.py", + # Needed to discover embedded Rule tests + "rules/*.py", +] +# Using --pyargs instead of testpath as we embed some tests +# See: https://github.com/pytest-dev/pytest/issues/6451#issuecomment-687043537 +# testpaths = +xfail_strict = true +markers = ["eco: Tests effects on a set of 3rd party ansible repositories"] + +[tool.setuptools.dynamic] +optional-dependencies.docs = { file = [".config/requirements-docs.txt"] } +optional-dependencies.test = { file = [".config/requirements-test.txt"] } +optional-dependencies.lock = { file = [".config/requirements-lock.txt"] } +dependencies = { file = [".config/requirements.in"] } + +[tool.setuptools_scm] +local_scheme = "no-local-version" +write_to = "src/ansiblelint/_version.py" diff --git a/src/ansiblelint/__init__.py b/src/ansiblelint/__init__.py new file mode 100644 index 0000000..0bd408f --- /dev/null +++ b/src/ansiblelint/__init__.py @@ -0,0 +1,29 @@ +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
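
One detail of the `[tool.pytest.ini_options]` table above that benefits from an example is the `markers` entry: registering the custom `eco` marker documents it and lets the third-party-repository tests be selected or skipped as a group. A small sketch of how such a marker is applied; the test module and function names are hypothetical.

```python
# Hypothetical test using the "eco" marker registered in pyproject.toml above.
import pytest


@pytest.mark.eco
def test_effects_on_third_party_repositories() -> None:
    """Placeholder body standing in for the real ecosystem checks."""
    assert True
```

Tests marked this way can then be run with `pytest -m eco` or excluded with `pytest -m "not eco"`.
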
+"""Main ansible-lint package.""" +from __future__ import annotations + +import ansiblelint._vendor +from ansiblelint.version import __version__ + +# make vendored top-level modules accessible EARLY + + +__all__ = ("__version__",) diff --git a/src/ansiblelint/__main__.py b/src/ansiblelint/__main__.py new file mode 100755 index 0000000..0437bc0 --- /dev/null +++ b/src/ansiblelint/__main__.py @@ -0,0 +1,468 @@ +#!/usr/bin/env python +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +"""Command line implementation.""" + +from __future__ import annotations + +import errno +import logging +import os +import pathlib +import shutil +import site +import subprocess +import sys +from contextlib import contextmanager +from typing import TYPE_CHECKING, Any, Callable, Iterator, TextIO + +from ansible_compat.config import ansible_version +from ansible_compat.prerun import get_cache_dir +from filelock import FileLock, Timeout + +from ansiblelint import cli +from ansiblelint._mockings import _perform_mockings_cleanup +from ansiblelint.app import get_app +from ansiblelint.color import ( + console, + console_options, + console_stderr, + reconfigure, + render_yaml, +) +from ansiblelint.config import get_version_warning, options +from ansiblelint.constants import EXIT_CONTROL_C_RC, GIT_CMD, LOCK_TIMEOUT_RC +from ansiblelint.file_utils import abspath, cwd, normpath +from ansiblelint.loaders import load_ignore_txt +from ansiblelint.skip_utils import normalize_tag +from ansiblelint.version import __version__ + +if TYPE_CHECKING: + from argparse import Namespace + + # RulesCollection must be imported lazily or ansible gets imported too early. + from ansiblelint.rules import RulesCollection + from ansiblelint.runner import LintResult + + +_logger = logging.getLogger(__name__) + + +def initialize_logger(level: int = 0) -> None: + """Set up the global logging level based on the verbosity number.""" + # We are about to act on the root logger, which defaults to logging.WARNING. + # That is where our 0 (default) value comes from. 
+ verbosity_map = { + -2: logging.CRITICAL, + -1: logging.ERROR, + 0: logging.WARNING, + 1: logging.INFO, + 2: logging.DEBUG, + } + + handler = logging.StreamHandler() + formatter = logging.Formatter("%(levelname)-8s %(message)s") + handler.setFormatter(formatter) + logger = logging.getLogger() + logger.addHandler(handler) + # Unknown logging level is treated as DEBUG + logging_level = verbosity_map.get(level, logging.DEBUG) + logger.setLevel(logging_level) + logging.captureWarnings(True) # pass all warnings.warn() messages through logging + # Use module-level _logger instance to validate it + _logger.debug("Logging initialized to level %s", logging_level) + + +def initialize_options(arguments: list[str] | None = None) -> None: + """Load config options and store them inside options module.""" + new_options = cli.get_config(arguments or []) + new_options.cwd = pathlib.Path.cwd() + + if new_options.colored is None: + new_options.colored = should_do_markup() + + # persist loaded configuration inside options module + for k, v in new_options.__dict__.items(): + setattr(options, k, v) + + # rename deprecated ids/tags to newer names + options.tags = [normalize_tag(tag) for tag in options.tags] + options.skip_list = [normalize_tag(tag) for tag in options.skip_list] + options.warn_list = [normalize_tag(tag) for tag in options.warn_list] + + options.configured = True + options.cache_dir = get_cache_dir(options.project_dir) + + # add a lock file so we do not have two instances running inside at the same time + os.makedirs(options.cache_dir, exist_ok=True) + + options.cache_dir_lock = None + if not options.offline: # pragma: no cover + options.cache_dir_lock = FileLock(f"{options.cache_dir}/.lock") + try: + options.cache_dir_lock.acquire(timeout=180) + except Timeout: # pragma: no cover + _logger.error( + "Timeout waiting for another instance of ansible-lint to release the lock." + ) + sys.exit(LOCK_TIMEOUT_RC) + + # Avoid extra output noise from Ansible about using devel versions + if "ANSIBLE_DEVEL_WARNING" not in os.environ: # pragma: no branch + os.environ["ANSIBLE_DEVEL_WARNING"] = "false" + + +def _do_list(rules: RulesCollection) -> int: + # On purpose lazy-imports to avoid pre-loading Ansible + # pylint: disable=import-outside-toplevel + from ansiblelint.generate_docs import ( + rules_as_docs, + rules_as_md, + rules_as_rich, + rules_as_str, + ) + + if options.list_rules: + _rule_format_map: dict[str, Callable[..., Any]] = { + "brief": rules_as_str, + "full": rules_as_rich, + "md": rules_as_md, + "docs": rules_as_docs, + } + + console.print( + _rule_format_map.get(options.format, rules_as_str)(rules), highlight=False + ) + return 0 + + if options.list_tags: + console.print(render_yaml(rules.list_tags())) + return 0 + + # we should not get here! + return 1 + + +# noinspection PyShadowingNames +def _do_transform(result: LintResult, opts: Namespace) -> None: + """Create and run Transformer.""" + if "yaml" in opts.skip_list: + # The transformer rewrites yaml files, but the user requested to skip + # the yaml rule or anything tagged with "yaml", so there is nothing to do. 
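
The comment at the top of `initialize_logger()` above is easiest to verify with the mapping written out: `-v` and `-vv` raise the level to INFO and DEBUG, a verbosity of 0 keeps the root logger's WARNING default, and anything outside the table falls back to DEBUG through `dict.get()`. A tiny sketch reproducing that lookup, with the table copied from the function above:

```python
# Reproduces the verbosity lookup used by initialize_logger().
import logging

verbosity_map = {
    -2: logging.CRITICAL,
    -1: logging.ERROR,
    0: logging.WARNING,
    1: logging.INFO,
    2: logging.DEBUG,
}

for verbosity in (0, 1, 2, 3):
    print(verbosity, logging.getLevelName(verbosity_map.get(verbosity, logging.DEBUG)))
# 0 WARNING, 1 INFO, 2 DEBUG, 3 DEBUG (unknown values are treated as DEBUG)
```
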
+ return + + # On purpose lazy-imports to avoid loading transforms unless requested + # pylint: disable=import-outside-toplevel + from ansiblelint.transformer import Transformer + + transformer = Transformer(result, options) + + # this will mark any matches as fixed if the transforms repaired the issue + transformer.run() + + +def support_banner() -> None: + """Display support banner when running on unsupported platform.""" + if sys.version_info < (3, 9, 0): # pragma: no cover + prefix = "::warning::" if "GITHUB_ACTION" in os.environ else "WARNING: " + console_stderr.print( + f"{prefix}ansible-lint is no longer tested under Python {sys.version_info.major}.{sys.version_info.minor} and will soon require 3.9. Do not report bugs for this version.", + style="bold red", + ) + + +# pylint: disable=too-many-branches,too-many-statements +def main(argv: list[str] | None = None) -> int: # noqa: C901 + """Linter CLI entry point.""" + # alter PATH if needed (venv support) + path_inject() + + if argv is None: # pragma: no cover + argv = sys.argv + initialize_options(argv[1:]) + + console_options["force_terminal"] = options.colored + reconfigure(console_options) + + if options.version: + console.print( + f"ansible-lint [repr.number]{__version__}[/] using ansible [repr.number]{ansible_version()}[/]" + ) + msg = get_version_warning() + if msg: + console.print(msg) + support_banner() + sys.exit(0) + else: + support_banner() + + initialize_logger(options.verbosity) + _logger.debug("Options: %s", options) + _logger.debug(os.getcwd()) + + if options.progressive: + _logger.warning( + "Progressive mode is deprecated and will be removed in next major version, use ignore files instead: https://ansible-lint.readthedocs.io/configuring/#ignoring-rules-for-entire-files" + ) + + if not options.offline: + # pylint: disable=import-outside-toplevel + from ansiblelint.schemas import refresh_schemas + + refresh_schemas() + + # pylint: disable=import-outside-toplevel + from ansiblelint.rules import RulesCollection + from ansiblelint.runner import _get_matches + + rules = RulesCollection(options.rulesdirs, profile_name=options.profile) + + if options.list_profiles: + from ansiblelint.generate_docs import profiles_as_rich + + console.print(profiles_as_rich()) + return 0 + + if options.list_rules or options.list_tags: + return _do_list(rules) + + app = get_app() + if isinstance(options.tags, str): + options.tags = options.tags.split(",") # pragma: no cover + result = _get_matches(rules, options) + + if options.write_list: + _do_transform(result, options) + + mark_as_success = True + if result.matches and options.progressive: + mark_as_success = False + _logger.info( + "Matches found, running again on previous revision in order to detect regressions" + ) + with _previous_revision(): + _logger.debug("Options: %s", options) + _logger.debug(os.getcwd()) + old_result = _get_matches(rules, options) + # remove old matches from current list + matches_delta = list(set(result.matches) - set(old_result.matches)) + if len(matches_delta) == 0: + _logger.warning( + "Total violations not increased since previous " + "commit, will mark result as success. 
(%s -> %s)", + len(old_result.matches), + len(matches_delta), + ) + mark_as_success = True + + ignored = 0 + for match in result.matches: + # if match is not new, mark is as ignored + if match not in matches_delta: + match.ignored = True + ignored += 1 + if ignored: + _logger.warning( + "Marked %s previously known violation(s) as ignored due to" + " progressive mode.", + ignored, + ) + + if options.strict and result.matches: + mark_as_success = False + + # Remove skip_list items from the result + result.matches = [m for m in result.matches if m.tag not in app.options.skip_list] + # Mark matches as ignored inside ignore file + ignore_map = load_ignore_txt() + for match in result.matches: + if match.tag in ignore_map[match.filename]: + match.ignored = True + + app.render_matches(result.matches) + + _perform_mockings_cleanup() + if options.cache_dir_lock: + options.cache_dir_lock.release() + pathlib.Path(options.cache_dir_lock.lock_file).unlink(missing_ok=True) + if options.mock_filters: + _logger.warning( + "The following filters were mocked during the run: %s", + ",".join(options.mock_filters), + ) + + return app.report_outcome(result, mark_as_success=mark_as_success) + + +@contextmanager +def _previous_revision() -> Iterator[None]: + """Create or update a temporary workdir containing the previous revision.""" + worktree_dir = f"{options.cache_dir}/old-rev" + # Update options.exclude_paths to include use the temporary workdir. + rel_exclude_paths = [normpath(p) for p in options.exclude_paths] + options.exclude_paths = [abspath(p, worktree_dir) for p in rel_exclude_paths] + revision = subprocess.run( + [*GIT_CMD, "rev-parse", "HEAD^1"], + check=True, + text=True, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ).stdout.strip() + _logger.info("Previous revision SHA: %s", revision) + path = pathlib.Path(worktree_dir) + if path.exists(): + shutil.rmtree(worktree_dir) + path.mkdir(parents=True, exist_ok=True) + # Run check will fail if worktree_dir already exists + # pylint: disable=subprocess-run-check + subprocess.run( + [*GIT_CMD, "worktree", "add", "-f", worktree_dir], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + try: + with cwd(worktree_dir): + subprocess.run( + [*GIT_CMD, "checkout", revision], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + check=True, + ) + yield + finally: + options.exclude_paths = [abspath(p, os.getcwd()) for p in rel_exclude_paths] + + +def _run_cli_entrypoint() -> None: + """Invoke the main entrypoint with current CLI args. + + This function also processes the runtime exceptions. + """ + try: + sys.exit(main(sys.argv)) + except OSError as exc: + # NOTE: Only "broken pipe" is acceptable to ignore + if exc.errno != errno.EPIPE: # pragma: no cover + raise + except KeyboardInterrupt: # pragma: no cover + sys.exit(EXIT_CONTROL_C_RC) + except RuntimeError as exc: # pragma: no cover + raise SystemExit(exc) from exc + + +def path_inject() -> None: + """Add python interpreter path to top of PATH to fix outside venv calling.""" + # This make it possible to call ansible-lint that was installed inside a + # virtualenv without having to pre-activate it. Otherwise subprocess will + # either fail to find ansible executables or call the wrong ones. + # + # This must be run before we do run any subprocesses, and loading config + # does this as part of the ansible detection. 
+ paths = [x for x in os.environ.get("PATH", "").split(os.pathsep) if x] + + # Expand ~ in PATH as it known to break many tools + expanded = False + for idx, path in enumerate(paths): + if "~" in path: # pragma: no cover + paths[idx] = os.path.expanduser(path) + expanded = True + if expanded: # pragma: no cover + # flake8: noqa: T201 + print( + "WARNING: PATH altered to expand ~ in it. Read https://stackoverflow.com/a/44704799/99834 and correct your system configuration.", + file=sys.stderr, + ) + + inject_paths = [] + + userbase_bin_path = f"{site.getuserbase()}/bin" + if userbase_bin_path not in paths and os.path.exists( + f"{userbase_bin_path}/bin/ansible" + ): + inject_paths.append(userbase_bin_path) + + py_path = os.path.dirname(sys.executable) + if py_path not in paths and os.path.exists(f"{py_path}/ansible"): + inject_paths.append(py_path) + + if inject_paths: + # flake8: noqa: T201 + print( + f"WARNING: PATH altered to include {', '.join(inject_paths)} :: This is usually a sign of broken local setup, which can cause unexpected behaviors.", + file=sys.stderr, + ) + if inject_paths or expanded: + os.environ["PATH"] = os.pathsep.join([*inject_paths, *paths]) + + # We do know that finding ansible in PATH does not guarantee that it is + # functioning or that is in fact the same version that was installed as + # our dependency, but addressing this would be done by ansible-compat. + for cmd in ("ansible", "git"): + if not shutil.which(cmd): + raise RuntimeError(f"Failed to find runtime dependency '{cmd}' in PATH") + + +# Based on Ansible implementation +def to_bool(value: Any) -> bool: # pragma: no cover + """Return a bool for the arg.""" + if value is None or isinstance(value, bool): + return bool(value) + if isinstance(value, str): + value = value.lower() + if value in ("yes", "on", "1", "true", 1): + return True + return False + + +def should_do_markup(stream: TextIO = sys.stdout) -> bool: # pragma: no cover + """Decide about use of ANSI colors.""" + py_colors = None + + # https://xkcd.com/927/ + for env_var in ["PY_COLORS", "CLICOLOR", "FORCE_COLOR", "ANSIBLE_FORCE_COLOR"]: + value = os.environ.get(env_var, None) + if value is not None: + py_colors = to_bool(value) + break + + # If deliberately disabled colors + if os.environ.get("NO_COLOR", None): + return False + + # User configuration requested colors + if py_colors is not None: + return to_bool(py_colors) + + term = os.environ.get("TERM", "") + if "xterm" in term: + return True + + if term == "dumb": + return False + + # Use tty detection logic as last resort because there are numerous + # factors that can make isatty return a misleading value, including: + # - stdin.isatty() is the only one returning true, even on a real terminal + # - stderr returning false if user user uses a error stream coloring solution + return stream.isatty() + + +if __name__ == "__main__": + _run_cli_entrypoint() diff --git a/src/ansiblelint/_internal/__init__.py b/src/ansiblelint/_internal/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/src/ansiblelint/_internal/__init__.py diff --git a/src/ansiblelint/_internal/internal_error.md b/src/ansiblelint/_internal/internal_error.md new file mode 100644 index 0000000..8db5e5e --- /dev/null +++ b/src/ansiblelint/_internal/internal_error.md @@ -0,0 +1,43 @@ +# internal-error + +This error can also be caused by internal bugs but also by custom rules. +Instead of just stopping tool execution, we generate the errors and continue +processing other files. 
This allows users to add this rule to their `warn_list` +until the root cause is fixed. + +Keep in mind that once an `internal-error` is found on a specific file, no +other rules will be executed on that same file. + +In almost all cases you will see more detailed information regarding the +original error or runtime exception that triggered this rule. + +If these files are broken on purpose, like some test fixtures, you need to add +them to the `exclude_paths`. + +## Problematic code + +```yaml +--- +- name: Some title {{ # <-- Ansible will not load this invalid jinja template + hosts: localhost + tasks: [] +``` + +## Correct code + +```yaml +--- +- name: Some title + hosts: localhost + tasks: [] +``` + +## ERROR! No hosts matched the subscripted pattern + +If you see this error, it means that you tried to index a host group variable +that is using an index above its size. + +Instead of doing something like `hosts: all[1]` which assumes that you have +at least two hosts in your current inventory, you better write something like +`hosts: "{{ all[1] | default([]) }}`, which is safe and do not produce runtime +errors. Use safe fallbacks to make your code more resilient. diff --git a/src/ansiblelint/_internal/load-failure.md b/src/ansiblelint/_internal/load-failure.md new file mode 100644 index 0000000..78daa0d --- /dev/null +++ b/src/ansiblelint/_internal/load-failure.md @@ -0,0 +1,12 @@ +# load-failure + +"Linter failed to process a file, possible invalid file. Possible reasons: + +* contains unsupported encoding (only UTF-8 is supported) +* not an Ansible file +* it contains some unsupported custom YAML objects (`!!` prefix) +* it was not able to decrypt an inline `!vault` block. + +This violation **is not** skippable, so it cannot be added to the `warn_list` +or the `skip_list`. If a vault decryption issue cannot be avoided, the +offending file can be added to `exclude_paths` configuration. diff --git a/src/ansiblelint/_internal/parser-error.md b/src/ansiblelint/_internal/parser-error.md new file mode 100644 index 0000000..f6c7649 --- /dev/null +++ b/src/ansiblelint/_internal/parser-error.md @@ -0,0 +1,5 @@ +# parser-error + +**AnsibleParserError.** + +Ansible parser fails; this usually indicates an invalid file. diff --git a/src/ansiblelint/_internal/rules.py b/src/ansiblelint/_internal/rules.py new file mode 100644 index 0000000..4d0bf49 --- /dev/null +++ b/src/ansiblelint/_internal/rules.py @@ -0,0 +1,194 @@ +"""Internally used rule classes.""" +from __future__ import annotations + +import inspect +import logging +from pathlib import Path +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import RULE_DOC_URL + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + from ansiblelint.rules import RulesCollection + +_logger = logging.getLogger(__name__) +LOAD_FAILURE_MD = """\ +# load-failure + +"Linter failed to process a file, possible invalid file. Possible reasons: + +* contains unsupported encoding (only UTF-8 is supported) +* not an Ansible file +* it contains some unsupported custom YAML objects (`!!` prefix) +* it was not able to decrypt an inline `!vault` block. + +This violation **is not** skippable, so it cannot be added to the `warn_list` +or the `skip_list`. If a vault decryption issue cannot be avoided, the +offending file can be added to `exclude_paths` configuration. 
+""" + + +# Derived rules are likely to want to access class members, so: +# pylint: disable=unused-argument +class BaseRule: + """Root class used by Rules.""" + + id: str = "" + tags: list[str] = [] + description: str = "" + version_added: str = "" + severity: str = "" + link: str = "" + has_dynamic_tags: bool = False + needs_raw_task: bool = False + # We use _order to sort rules and to ensure that some run before others, + # _order 0 for internal rules + # _order 1 for rules that check that data can be loaded + # _order 5 implicit for normal rules + _order: int = 5 + _help: str | None = None + # Added when a rule is registered into a collection, gives access to options + _collection: RulesCollection | None = None + + @property + def help(self) -> str: + """Return a help markdown string for the rule.""" + if self._help is None: + self._help = "" + md_file = ( + Path(inspect.getfile(self.__class__)).parent + / f"{self.id.replace('-', '_')}.md" + ) + if md_file.exists(): + self._help = md_file.read_text(encoding="utf-8") + return self._help + + @property + def url(self) -> str: + """Return rule documentation url.""" + url = self.link + if not url: # pragma: no cover + url = RULE_DOC_URL + if self.id: + url += self.id + "/" + return url + + @property + def shortdesc(self) -> str: + """Return the short description of the rule, basically the docstring.""" + return self.__doc__ or "" + + def getmatches(self, file: Lintable) -> list[MatchError]: + """Return all matches while ignoring exceptions.""" + matches = [] + if not file.path.is_dir(): + for method in [self.matchlines, self.matchtasks, self.matchyaml]: + try: + matches.extend(method(file)) + except Exception as exc: # pylint: disable=broad-except + _logger.warning( + "Ignored exception from %s.%s while processing %s: %s", + self.__class__.__name__, + method, + str(file), + exc, + ) + else: + matches.extend(self.matchdir(file)) + return matches + + def matchlines(self, file: Lintable) -> list[MatchError]: + """Return matches found for a specific line.""" + return [] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str | MatchError | list[MatchError]: + """Confirm if current rule is matching a specific task. + + If ``needs_raw_task`` (a class level attribute) is ``True``, then + the original task (before normalization) will be made available under + ``task["__raw_task__"]``. 
+ """ + return False + + def matchtasks(self, file: Lintable) -> list[MatchError]: + """Return matches for a tasks file.""" + return [] + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Return matches found for a specific YAML text.""" + return [] + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + """Return matches found for a specific playbook.""" + return [] + + def matchdir(self, lintable: Lintable) -> list[MatchError]: + """Return matches for lintable folders.""" + return [] + + def verbose(self) -> str: + """Return a verbose representation of the rule.""" + return self.id + ": " + self.shortdesc + "\n " + self.description + + def match(self, line: str) -> bool | str: + """Confirm if current rule matches the given string.""" + return False + + def __lt__(self, other: BaseRule) -> bool: + """Enable us to sort rules by their id.""" + return (self._order, self.id) < (other._order, other.id) + + def __repr__(self) -> str: + """Return a AnsibleLintRule instance representation.""" + return self.id + ": " + self.shortdesc + + +# pylint: enable=unused-argument + + +class RuntimeErrorRule(BaseRule): + """Unexpected internal error.""" + + id = "internal-error" + severity = "VERY_HIGH" + tags = ["core"] + version_added = "v5.0.0" + _order = 0 + + +class AnsibleParserErrorRule(BaseRule): + """AnsibleParserError.""" + + id = "parser-error" + description = "Ansible parser fails; this usually indicates an invalid file." + severity = "VERY_HIGH" + tags = ["core"] + version_added = "v5.0.0" + _order = 0 + + +class LoadingFailureRule(BaseRule): + """Failed to load or parse file.""" + + id = "load-failure" + description = "Linter failed to process a file, possible invalid file." + severity = "VERY_HIGH" + tags = ["core", "unskippable"] + version_added = "v4.3.0" + help = LOAD_FAILURE_MD + _order = 0 + + +class WarningRule(BaseRule): + """Other warnings detected during run.""" + + id = "warning" + severity = "LOW" + # should remain experimental as that would keep it warning only + tags = ["core", "experimental"] + version_added = "v6.8.0" + _order = 0 diff --git a/src/ansiblelint/_internal/warning.md b/src/ansiblelint/_internal/warning.md new file mode 100644 index 0000000..97d2577 --- /dev/null +++ b/src/ansiblelint/_internal/warning.md @@ -0,0 +1,9 @@ +# warning + +`warning` is a special type of internal rule that is used to report generic +runtime warnings found during execution. As stated by its name, they are not +counted as errors, so they do not influence the final outcome. + +- `warning[raw-non-string]` indicates that you are using + `[raw](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/raw_module.html#ansible-collections-ansible-builtin-raw-module)` + module with non-string arguments, which is not supported by Ansible. 
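
The `BaseRule` API above, in particular `matchtask()` and the `needs_raw_task` flag, is the extension point that `examples/rules/task_has_tag.py` builds on. The following minimal rule sketch makes that contract concrete; the rule id, tag, and check are invented for illustration, and like the example rule it would subclass `AnsibleLintRule` from `ansiblelint.rules`.

```python
"""Hypothetical rule illustrating the BaseRule/AnsibleLintRule contract shown above."""
from __future__ import annotations

from typing import TYPE_CHECKING, Any

from ansiblelint.rules import AnsibleLintRule

if TYPE_CHECKING:
    from ansiblelint.file_utils import Lintable


class NoTabsInRawTask(AnsibleLintRule):
    """Raw task content must not contain tab characters."""  # doubles as shortdesc

    id = "EXAMPLE002"  # invented id, following the EXAMPLE001 style used above
    description = "Demonstrates needs_raw_task and matchtask()"
    tags = ["formatting"]
    needs_raw_task = True  # ask the runner for the pre-normalization task

    def matchtask(
        self, task: dict[str, Any], file: Lintable | None = None
    ) -> bool | str:
        raw_task = task.get("__raw_task__") or {}
        if any("\t" in str(value) for value in raw_task.values()):
            return "Task uses a literal tab character."
        return False
```

Placed in a custom rules directory, such a class would be loaded through `RulesCollection(options.rulesdirs, ...)` as seen in `__main__.py` above.
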
diff --git a/src/ansiblelint/_mockings.py b/src/ansiblelint/_mockings.py new file mode 100644 index 0000000..6b57d6c --- /dev/null +++ b/src/ansiblelint/_mockings.py @@ -0,0 +1,86 @@ +"""Utilities for mocking ansible modules and roles.""" +from __future__ import annotations + +import logging +import os +import re +import sys + +from ansiblelint.config import options +from ansiblelint.constants import ANSIBLE_MOCKED_MODULE, INVALID_CONFIG_RC + +_logger = logging.getLogger(__name__) + + +def _make_module_stub(module_name: str) -> None: + # a.b.c is treated a collection + if re.match(r"^(\w+|\w+\.\w+\.[\.\w]+)$", module_name): + parts = module_name.split(".") + if len(parts) < 3: + path = f"{options.cache_dir}/modules" + module_file = f"{options.cache_dir}/modules/{module_name}.py" + namespace = None + collection = None + else: + namespace = parts[0] + collection = parts[1] + path = f"{ options.cache_dir }/collections/ansible_collections/{ namespace }/{ collection }/plugins/modules/{ '/'.join(parts[2:-1]) }" + module_file = f"{path}/{parts[-1]}.py" + os.makedirs(path, exist_ok=True) + _write_module_stub( + filename=module_file, + name=module_file, + namespace=namespace, + collection=collection, + ) + else: + _logger.error("Config error: %s is not a valid module name.", module_name) + sys.exit(INVALID_CONFIG_RC) + + +def _write_module_stub( + filename: str, + name: str, + namespace: str | None = None, + collection: str | None = None, +) -> None: + """Write module stub to disk.""" + body = ANSIBLE_MOCKED_MODULE.format( + name=name, collection=collection, namespace=namespace + ) + with open(filename, "w", encoding="utf-8") as f: + f.write(body) + + +# pylint: disable=too-many-branches +def _perform_mockings() -> None: # noqa: C901 + """Mock modules and roles.""" + for role_name in options.mock_roles: + if re.match(r"\w+\.\w+\.\w+$", role_name): + namespace, collection, role_dir = role_name.split(".") + path = f"{options.cache_dir}/collections/ansible_collections/{ namespace }/{ collection }/roles/{ role_dir }/" + else: + path = f"{options.cache_dir}/roles/{role_name}" + # Avoid error from makedirs if destination is a broken symlink + if os.path.islink(path) and not os.path.exists(path): # pragma: no cover + _logger.warning("Removed broken symlink from %s", path) + os.unlink(path) + os.makedirs(path, exist_ok=True) + + if options.mock_modules: + for module_name in options.mock_modules: + _make_module_stub(module_name) + + +def _perform_mockings_cleanup() -> None: # noqa: C901 + """Clean up mocked modules and roles.""" + for role_name in options.mock_roles: + if re.match(r"\w+\.\w+\.\w+$", role_name): + namespace, collection, role_dir = role_name.split(".") + path = f"{options.cache_dir}/collections/ansible_collections/{ namespace }/{ collection }/roles/{ role_dir }/" + else: + path = f"{options.cache_dir}/roles/{role_name}" + try: + os.rmdir(path) + except OSError: + pass diff --git a/src/ansiblelint/_vendor/__init__.py b/src/ansiblelint/_vendor/__init__.py new file mode 100644 index 0000000..9cd4ee3 --- /dev/null +++ b/src/ansiblelint/_vendor/__init__.py @@ -0,0 +1,48 @@ +import os +import pkgutil +import sys +import warnings + +# This package exists to host vendored top-level Python packages for downstream packaging. Any Python packages +# installed beneath this one will be masked from the Ansible loader, and available from the front of sys.path. 
+# It is expected that the vendored packages will be loaded very early, so a warning will be fired on import of +# the top-level ansible package if any packages beneath this are already loaded at that point. +# +# Python packages may be installed here during downstream packaging using something like: +# pip install --upgrade -t (path to this dir) cryptography pyyaml packaging jinja2 + +# mask vendored content below this package from being accessed as a subpackage +__path__ = [] + + +def _ensure_vendored_path_entry() -> None: + """ + Ensure that any downstream-bundled content beneath this package is available at the top of sys.path + """ + # patch our vendored dir onto sys.path + vendored_path_entry = os.path.dirname(__file__) + vendored_module_names = { + m[1] for m in pkgutil.iter_modules([vendored_path_entry], "") + } # m[1] == m.name + + if vendored_module_names: + # patch us early to load vendored deps transparently + if vendored_path_entry in sys.path: + # handle reload case by removing the existing entry, wherever it might be + sys.path.remove(vendored_path_entry) + sys.path.insert(0, vendored_path_entry) + + already_loaded_vendored_modules = set(sys.modules.keys()).intersection( + vendored_module_names + ) + + if already_loaded_vendored_modules: + warnings.warn( + "One or more Python packages bundled by this ansible-lint distribution were already " + "loaded ({}). This may result in undefined behavior.".format( + ", ".join(sorted(already_loaded_vendored_modules)) + ) + ) + + +_ensure_vendored_path_entry() diff --git a/src/ansiblelint/_vendor/ansible_compat b/src/ansiblelint/_vendor/ansible_compat new file mode 120000 index 0000000..cd5012a --- /dev/null +++ b/src/ansiblelint/_vendor/ansible_compat @@ -0,0 +1 @@ +../../../.projects/ansible-compat/src/ansible_compat
\ No newline at end of file diff --git a/src/ansiblelint/app.py b/src/ansiblelint/app.py new file mode 100644 index 0000000..7d14504 --- /dev/null +++ b/src/ansiblelint/app.py @@ -0,0 +1,360 @@ +"""Application.""" +from __future__ import annotations + +import itertools +import logging +import os +from functools import lru_cache +from typing import TYPE_CHECKING, Any + +from ansible_compat.runtime import Runtime +from rich.markup import escape +from rich.table import Table + +from ansiblelint import formatters +from ansiblelint._mockings import _perform_mockings +from ansiblelint.color import console, console_stderr, render_yaml +from ansiblelint.config import PROFILES, get_version_warning +from ansiblelint.config import options as default_options +from ansiblelint.constants import RULE_DOC_URL, SUCCESS_RC, VIOLATIONS_FOUND_RC +from ansiblelint.errors import MatchError +from ansiblelint.loaders import IGNORE_TXT +from ansiblelint.stats import SummarizedResults, TagStats + +if TYPE_CHECKING: + from argparse import Namespace + from typing import Dict, Set # pylint: disable=ungrouped-imports + + from ansiblelint._internal.rules import BaseRule + from ansiblelint.file_utils import Lintable + from ansiblelint.runner import LintResult + + +_logger = logging.getLogger(__package__) + + +class App: + """App class represents an execution of the linter.""" + + def __init__(self, options: Namespace): + """Construct app run based on already loaded configuration.""" + options.skip_list = _sanitize_list_options(options.skip_list) + options.warn_list = _sanitize_list_options(options.warn_list) + + self.options = options + + formatter_factory = choose_formatter_factory(options) + self.formatter = formatter_factory(options.cwd, options.display_relative_path) + + # Without require_module, our _set_collections_basedir may fail + self.runtime = Runtime(isolated=True, require_module=True) + + def render_matches(self, matches: list[MatchError]) -> None: + """Display given matches (if they are not fixed).""" + matches = [match for match in matches if not match.fixed] + + if isinstance( + self.formatter, + (formatters.CodeclimateJSONFormatter, formatters.SarifFormatter), + ): + # If formatter CodeclimateJSONFormatter or SarifFormatter is chosen, + # then print only the matches in JSON + console.print( + self.formatter.format_result(matches), markup=False, highlight=False + ) + return + + ignored_matches = [match for match in matches if match.ignored] + fatal_matches = [match for match in matches if not match.ignored] + # Displayed ignored matches first + if ignored_matches: + _logger.warning( + "Listing %s violation(s) marked as ignored, likely already known", + len(ignored_matches), + ) + for match in ignored_matches: + if match.ignored: + # highlight must be off or apostrophes may produce unexpected results + console.print(self.formatter.format(match), highlight=False) + if fatal_matches: + _logger.warning( + "Listing %s violation(s) that are fatal", len(fatal_matches) + ) + for match in fatal_matches: + if not match.ignored: + console.print(self.formatter.format(match), highlight=False) + + # If run under GitHub Actions we also want to emit output recognized by it. + if os.getenv("GITHUB_ACTIONS") == "true" and os.getenv("GITHUB_WORKFLOW"): + formatter = formatters.AnnotationsFormatter(self.options.cwd, True) + for match in itertools.chain(fatal_matches, ignored_matches): + console.print(formatter.format(match), markup=False, highlight=False) + + # If sarif_file is set, we also dump the results to a sarif file. 
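# (Illustration, hypothetical file names: running 'ansible-lint --sarif-file results.sarif site.yml'
# keeps the regular console output and additionally writes the SARIF report to results.sarif,
# which tools such as GitHub code scanning can ingest.)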
+ if self.options.sarif_file: + sarif = formatters.SarifFormatter(self.options.cwd, True) + json = sarif.format_result(matches) + with open(self.options.sarif_file, "w", encoding="utf-8") as sarif_file: + sarif_file.write(json) + + def count_results(self, matches: list[MatchError]) -> SummarizedResults: + """Count failures and warnings in matches.""" + result = SummarizedResults() + + for match in matches: + # any ignores match counts as a warning + if match.ignored: + result.warnings += 1 + continue + # tag can include a sub-rule id: `yaml[document-start]` + # rule.id is the generic rule id: `yaml` + # *rule.tags is the list of the rule's tags (categories): `style` + if match.tag not in result.tag_stats: + result.tag_stats[match.tag] = TagStats( + tag=match.tag, count=1, associated_tags=match.rule.tags + ) + else: + result.tag_stats[match.tag].count += 1 + + if {match.tag, match.rule.id, *match.rule.tags}.isdisjoint( + self.options.warn_list + ): + # not in warn_list + if match.fixed: + result.fixed_failures += 1 + else: + result.failures += 1 + else: + result.tag_stats[match.tag].warning = True + if match.fixed: + result.fixed_warnings += 1 + else: + result.warnings += 1 + return result + + @staticmethod + def count_lintables(files: set[Lintable]) -> tuple[int, int]: + """Count total and modified files.""" + files_count = len(files) + changed_files_count = len([file for file in files if file.updated]) + return files_count, changed_files_count + + @staticmethod + def _get_matched_skippable_rules( + matches: list[MatchError], + ) -> dict[str, BaseRule]: + """Extract the list of matched rules, if skippable, from the list of matches.""" + matches_unignored = [match for match in matches if not match.ignored] + # match.tag is more specialized than match.rule.id + matched_rules = { + match.tag or match.rule.id: match.rule for match in matches_unignored + } + # remove unskippable rules from the list + for rule_id in list(matched_rules.keys()): + if "unskippable" in matched_rules[rule_id].tags: + matched_rules.pop(rule_id) + return matched_rules + + def report_outcome(self, result: LintResult, mark_as_success: bool = False) -> int: + """Display information about how to skip found rules. + + Returns exit code, 2 if errors were found, 0 when only warnings were found. + """ + msg = "" + + summary = self.count_results(result.matches) + files_count, changed_files_count = self.count_lintables(result.files) + + matched_rules = self._get_matched_skippable_rules(result.matches) + + if matched_rules and self.options.generate_ignore: + console_stderr.print(f"Writing ignore file to {IGNORE_TXT}") + lines = set() + for rule in result.matches: + lines.add(f"{rule.filename} {rule.tag}\n") + with open(IGNORE_TXT, "w", encoding="utf-8") as ignore_file: + ignore_file.write( + "# This file contains ignores rule violations for ansible-lint\n" + ) + ignore_file.writelines(sorted(list(lines))) + elif matched_rules and not self.options.quiet: + console_stderr.print( + "Read [link=https://ansible-lint.readthedocs.io/configuring/#ignoring-rules-for-entire-files]documentation[/link] for instructions on how to ignore specific rule violations." + ) + + # Do not deprecate the old tags just yet. Why? Because it is not currently feasible + # to migrate old tags to new tags. There are a lot of things out there that still + # use ansible-lint 4 (for example, Ansible Galaxy and Automation Hub imports). If we + # replace the old tags, those tools will report warnings. 
If we do not replace them, + # ansible-lint 5 will report warnings. + # + # We can do the deprecation once the ecosystem caught up at least a bit. + # for k, v in used_old_tags.items(): + # _logger.warning( + # "Replaced deprecated tag '%s' with '%s' but it will become an " + # "error in the future.", + # k, + # v, + # ) + + if self.options.write_list and "yaml" in self.options.skip_list: + _logger.warning( + "You specified '--write', but no files can be modified " + "because 'yaml' is in 'skip_list'." + ) + + if mark_as_success and summary.failures and not self.options.progressive: + mark_as_success = False + + if not self.options.quiet: + console_stderr.print(render_yaml(msg)) + self.report_summary( + summary, changed_files_count, files_count, is_success=mark_as_success + ) + + return SUCCESS_RC if mark_as_success else VIOLATIONS_FOUND_RC + + def report_summary( # pylint: disable=too-many-branches,too-many-locals + self, + summary: SummarizedResults, + changed_files_count: int, + files_count: int, + is_success: bool, + ) -> None: + """Report match and file counts.""" + # sort the stats by profiles + idx = 0 + rule_order = {} + + for profile, profile_config in PROFILES.items(): + for rule in profile_config["rules"]: + # print(profile, rule) + rule_order[rule] = (idx, profile) + idx += 1 + _logger.debug("Determined rule-profile order: %s", rule_order) + failed_profiles = set() + for tag, tag_stats in summary.tag_stats.items(): + if tag in rule_order: + tag_stats.order, tag_stats.profile = rule_order.get(tag, (idx, "")) + elif "[" in tag: + tag_stats.order, tag_stats.profile = rule_order.get( + tag.split("[")[0], (idx, "") + ) + if tag_stats.profile: + failed_profiles.add(tag_stats.profile) + summary.sort() + + if changed_files_count: + console_stderr.print(f"Modified {changed_files_count} files.") + + # determine which profile passed + summary.passed_profile = "" + passed_profile_count = 0 + for profile in PROFILES.keys(): + if profile in failed_profiles: + break + if profile != summary.passed_profile: + summary.passed_profile = profile + passed_profile_count += 1 + + stars = "" + if summary.tag_stats: + table = Table( + title="Rule Violation Summary", + collapse_padding=True, + box=None, + show_lines=False, + ) + table.add_column("count", justify="right") + table.add_column("tag") + table.add_column("profile") + table.add_column("rule associated tags") + for tag, stats in summary.tag_stats.items(): + table.add_row( + str(stats.count), + f"[link={RULE_DOC_URL}{ tag.split('[')[0] }]{escape(tag)}[/link]", + stats.profile, + f"{', '.join(stats.associated_tags)}{' (warning)' if stats.warning else ''}", + style="yellow" if stats.warning else "red", + ) + # rate stars for the top 5 profiles (min would not get + rating = 5 - (len(PROFILES.keys()) - passed_profile_count) + if 0 < rating < 6: + stars = f", {rating}/5 star rating" + + console_stderr.print(table) + console_stderr.print() + + if is_success: + msg = "[green]Passed[/] with " + else: + msg = "[red][bold]Failed[/][/] after " + + if summary.passed_profile: + msg += f"[bold]{summary.passed_profile}[/] profile" + if stars: + msg += stars + + msg += f": {summary.failures} failure(s), {summary.warnings} warning(s)" + if summary.fixed: + msg += f", and fixed {summary.fixed} issue(s)" + msg += f" on {files_count} files." + + # on offline mode and when run under pre-commit we do not want to + # check for updates. 
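# (Illustration: get_version_warning() below may reach out to the GitHub releases API, so the
# check is skipped when offline mode is enabled or when the PRE_COMMIT environment variable
# equals "1", e.g. 'PRE_COMMIT=1 ansible-lint site.yml' — the playbook name is hypothetical.)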
+ if not self.options.offline and os.environ.get("PRE_COMMIT", "0") != "1": + version_warning = get_version_warning() + if version_warning: + msg += f"\n{version_warning}" + + console_stderr.print(msg) + + +def choose_formatter_factory( + options_list: Namespace, +) -> type[formatters.BaseFormatter[Any]]: + """Select an output formatter based on the incoming command line arguments.""" + r: type[formatters.BaseFormatter[Any]] = formatters.Formatter + if options_list.format == "quiet": + r = formatters.QuietFormatter + elif options_list.format in ("json", "codeclimate"): + r = formatters.CodeclimateJSONFormatter + elif options_list.format == "sarif": + r = formatters.SarifFormatter + elif options_list.parseable or options_list.format == "pep8": + r = formatters.ParseableFormatter + return r + + +def _sanitize_list_options(tag_list: list[str]) -> list[str]: + """Normalize list options.""" + # expand comma separated entries + tags = set() + for tag in tag_list: + tags.update(str(tag).split(",")) + # remove duplicates, and return as sorted list + return sorted(set(tags)) + + +@lru_cache +def get_app() -> App: + """Return the application instance, caching the return value.""" + offline = default_options.offline + app = App(options=default_options) + # Make linter use the cache dir from compat + default_options.cache_dir = app.runtime.cache_dir + + role_name_check = 0 + if "role-name" in app.options.warn_list: + role_name_check = 1 + elif "role-name" in app.options.skip_list: + role_name_check = 2 + + # mocking must happen before prepare_environment or galaxy install might + # fail. + _perform_mockings() + app.runtime.prepare_environment( + install_local=(not offline), offline=offline, role_name_check=role_name_check + ) + + return app diff --git a/src/ansiblelint/cli.py b/src/ansiblelint/cli.py new file mode 100644 index 0000000..9540d23 --- /dev/null +++ b/src/ansiblelint/cli.py @@ -0,0 +1,593 @@ +"""CLI parser setup and helpers.""" +from __future__ import annotations + +import argparse +import logging +import os +import sys +from argparse import Namespace +from pathlib import Path +from typing import Any, Callable, Sequence + +from ansiblelint.config import DEFAULT_KINDS, DEFAULT_WARN_LIST, PROFILES +from ansiblelint.constants import ( + CUSTOM_RULESDIR_ENVVAR, + DEFAULT_RULESDIR, + INVALID_CONFIG_RC, +) +from ansiblelint.file_utils import ( + Lintable, + abspath, + expand_path_vars, + guess_project_dir, + normpath, +) +from ansiblelint.schemas.main import validate_file_schema +from ansiblelint.yaml_utils import clean_json + +_logger = logging.getLogger(__name__) +_PATH_VARS = [ + "exclude_paths", + "rulesdir", +] + + +def expand_to_normalized_paths( + config: dict[str, Any], base_dir: str | None = None +) -> None: + """Mutate given config normalizing any path values in it.""" + # config can be None (-c /dev/null) + if not config: + return + base_dir = base_dir or os.getcwd() + for paths_var in _PATH_VARS: + if paths_var not in config: + continue # Cause we don't want to add a variable not present + + normalized_paths = [] + for path in config.pop(paths_var): + normalized_path = abspath(expand_path_vars(path), base_dir=base_dir) + + normalized_paths.append(normalized_path) + + config[paths_var] = normalized_paths + + +def load_config(config_file: str) -> dict[Any, Any]: + """Load configuration from disk.""" + config_path = None + if config_file: + config_path = os.path.abspath(config_file) + if not os.path.exists(config_path): + _logger.error("Config file not found '%s'", config_path) + 
sys.exit(INVALID_CONFIG_RC) + config_path = config_path or get_config_path() + if not config_path or not os.path.exists(config_path): + # a missing default config file should not trigger an error + return {} + + config_lintable = Lintable( + config_path, kind="ansible-lint-config", base_kind="text/yaml" + ) + + for error in validate_file_schema(config_lintable): + _logger.error("Invalid configuration file %s. %s", config_path, error) + sys.exit(INVALID_CONFIG_RC) + + config = clean_json(config_lintable.data) + if not isinstance(config, dict): + raise RuntimeError("Schema failed to properly validate the config file.") + config["config_file"] = config_path + config_dir = os.path.dirname(config_path) + expand_to_normalized_paths(config, config_dir) + + return config + + +def get_config_path(config_file: str | None = None) -> str | None: + """Return local config file.""" + if config_file: + project_filenames = [config_file] + else: + project_filenames = [".ansible-lint", ".config/ansible-lint.yml"] + parent = tail = os.getcwd() + while tail: + for project_filename in project_filenames: + filename = os.path.abspath(os.path.join(parent, project_filename)) + if os.path.exists(filename): + return filename + if os.path.exists(os.path.abspath(os.path.join(parent, ".git"))): + # Avoid looking outside .git folders as we do not want end-up + # picking config files from upper level projects if current + # project has no config. + return None + (parent, tail) = os.path.split(parent) + return None + + +class AbspathArgAction(argparse.Action): + """Argparse action to convert relative paths to absolute paths.""" + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: Namespace, + values: str | Sequence[Any] | None, + option_string: str | None = None, + ) -> None: + if isinstance(values, (str, Path)): + values = [values] + if values: + normalized_values = [ + Path(expand_path_vars(str(path))).resolve() for path in values + ] + previous_values = getattr(namespace, self.dest, []) + setattr(namespace, self.dest, previous_values + normalized_values) + + +class WriteArgAction(argparse.Action): + """Argparse action to handle the --write flag with optional args.""" + + _default = "__default__" + + # noinspection PyShadowingBuiltins + def __init__( # pylint: disable=too-many-arguments,redefined-builtin + self, + option_strings: list[str], + dest: str, + nargs: int | str | None = None, + const: Any = None, + default: Any = None, + type: Callable[[str], Any] | None = None, + choices: list[Any] | None = None, + required: bool = False, + help: str | None = None, + metavar: str | None = None, + ) -> None: + """Create the argparse action with WriteArg-specific defaults.""" + if nargs is not None: + raise ValueError("nargs for WriteArgAction must not be set.") + if const is not None: + raise ValueError("const for WriteArgAction must not be set.") + super().__init__( + option_strings=option_strings, + dest=dest, + nargs="?", # either 0 (--write) or 1 (--write=a,b,c) argument + const=self._default, # --write (no option) implicitly stores this + default=default, + type=type, + choices=choices, + required=required, + help=help, + metavar=metavar, + ) + + def __call__( + self, + parser: argparse.ArgumentParser, + namespace: Namespace, + values: str | Sequence[Any] | None, + option_string: str | None = None, + ) -> None: + lintables = getattr(namespace, "lintables", None) + if not lintables and isinstance(values, str): + # args are processed in order. 
+ # If --write is after lintables, then that is not ambiguous. + # But if --write comes first, then it might actually be a lintable. + maybe_lintable = Path(values) + if maybe_lintable.exists(): + setattr(namespace, "lintables", [values]) + values = [] + if isinstance(values, str): + values = values.split(",") + default = [self.const] if isinstance(self.const, str) else self.const + previous_values = getattr(namespace, self.dest, default) or default + if not values: + values = previous_values + elif previous_values != default: + values = previous_values + values + setattr(namespace, self.dest, values) + + @classmethod + def merge_write_list_config( + cls, from_file: list[str], from_cli: list[str] + ) -> list[str]: + """Combine the write_list from file config with --write CLI arg. + + Handles the implicit "all" when "__default__" is present and file config is empty. + """ + if not from_file or "none" in from_cli: + # --write is the same as --write=all + return ["all" if value == cls._default else value for value in from_cli] + # --write means use the config from the config file + from_cli = [value for value in from_cli if value != cls._default] + return from_file + from_cli + + +def get_cli_parser() -> argparse.ArgumentParser: + """Initialize an argument parser.""" + parser = argparse.ArgumentParser() + + listing_group = parser.add_mutually_exclusive_group() + listing_group.add_argument( + "-P", + "--list-profiles", + dest="list_profiles", + default=False, + action="store_true", + help="List all profiles, no formatting options available.", + ) + listing_group.add_argument( + "-L", + "--list-rules", + dest="list_rules", + default=False, + action="store_true", + help="List all the rules. For listing rules only the following formats " + "for argument -f are supported: {brief, full, md} with 'brief' as default.", + ) + listing_group.add_argument( + "-T", + "--list-tags", + dest="list_tags", + action="store_true", + help="List all the tags and the rules they cover. Increase the verbosity level " + "with `-v` to include 'opt-in' tag and its rules.", + ) + parser.add_argument( + "-f", + "--format", + dest="format", + default=None, + choices=[ + "brief", + # "plain", + "full", + "md", + "json", + "codeclimate", + "quiet", + "pep8", + "sarif", + "docs", # internally used + ], + help="stdout formatting, json being an alias for codeclimate. (default: %(default)s)", + ) + parser.add_argument("--sarif-file", default=None, help="SARIF output file") + parser.add_argument( + "-q", + dest="quiet", + default=0, + action="count", + help="quieter, reduce verbosity, can be specified twice.", + ) + parser.add_argument( + "--profile", + dest="profile", + default=None, + action="store", + choices=PROFILES.keys(), + help="Specify which rules profile to be used.", + ) + parser.add_argument( + "-p", + "--parseable", + dest="parseable", + default=False, + action="store_true", + help="parseable output, same as '-f pep8'", + ) + parser.add_argument( + "--progressive", + dest="progressive", + default=False, + action="store_true", + help="Return success if number of violations compared with " + "previous git commit has not increased. 
This feature works " + "only in git repositories.", + ) + parser.add_argument( + "--project-dir", + dest="project_dir", + default=".", + help="Location of project/repository, autodetected based on location " + "of configuration file.", + ) + parser.add_argument( + "-r", + "--rules-dir", + action=AbspathArgAction, + dest="rulesdir", + default=[], + type=Path, + help="Specify custom rule directories. Add -R " + f"to keep using embedded rules from {DEFAULT_RULESDIR}", + ) + parser.add_argument( + "-R", + action="store_true", + default=False, + dest="use_default_rules", + help="Keep default rules when using -r", + ) + parser.add_argument( + "-s", + "--strict", + action="store_true", + default=False, + dest="strict", + help="Return non-zero exit code on warnings as well as errors", + ) + parser.add_argument( + "--write", + dest="write_list", + # this is a tri-state argument that takes an optional comma separated list: + # not provided, --write, --write=a,b,c + action=WriteArgAction, + help="Allow ansible-lint to reformat YAML files and run rule transforms " + "(Reformatting YAML files standardizes spacing, quotes, etc. " + "A rule transform can fix or simplify fixing issues identified by that rule). " + "You can limit the effective rule transforms (the 'write_list') by passing a " + "keywords 'all' or 'none' or a comma separated list of rule ids or rule tags. " + "YAML reformatting happens whenever '--write' or '--write=' is used. " + "'--write' and '--write=all' are equivalent: they allow all transforms to run. " + "The effective list of transforms comes from 'write_list' in the config file, " + "followed whatever '--write' args are provided on the commandline. " + "'--write=none' resets the list of transforms to allow reformatting YAML " + "without running any of the transforms (ie '--write=none,rule-id' will " + "ignore write_list in the config file and only run the rule-id transform).", + ) + parser.add_argument( + "--show-relpath", + dest="display_relative_path", + action="store_false", + default=True, + help="Display path relative to CWD", + ) + parser.add_argument( + "-t", + "--tags", + dest="tags", + action="append", + default=[], + help="only check rules whose id/tags match these values", + ) + parser.add_argument( + "-v", + dest="verbosity", + action="count", + help="Increase verbosity level (-vv for more)", + default=0, + ) + parser.add_argument( + "-x", + "--skip-list", + dest="skip_list", + default=[], + action="append", + help="only check rules whose id/tags do not match these values. \ + e.g: --skip-list=name,run-once", + ) + parser.add_argument( + "--generate-ignore", + dest="generate_ignore", + action="store_true", + default=False, + help="Generate a text file '.ansible-lint-ignore' that ignores all found violations. Each line contains filename and rule id separated by a space.", + ) + parser.add_argument( + "-w", + "--warn-list", + dest="warn_list", + default=[], + action="append", + help="only warn about these rules, unless overridden in " + f"config file. Current version default value is: {', '.join(DEFAULT_WARN_LIST)}", + ) + parser.add_argument( + "--enable-list", + dest="enable_list", + default=[], + action="append", + help="activate optional rules by their tag name", + ) + # Do not use store_true/store_false because they create opposite defaults. 
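# (Illustration: with store_const and no explicit default, omitting both flags leaves
# 'colored' as None, presumably so later configuration can still decide; store_true and
# store_false would instead install opposite implicit defaults (False/True) that hide
# whether the user passed a flag at all.)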
+ parser.add_argument( + "--nocolor", + dest="colored", + action="store_const", + const=False, + help="disable colored output, same as NO_COLOR=1", + ) + parser.add_argument( + "--force-color", + dest="colored", + action="store_const", + const=True, + help="Force colored output, same as FORCE_COLOR=1", + ) + parser.add_argument( + "--exclude", + dest="exclude_paths", + action=AbspathArgAction, + type=Path, + default=[], + help="path to directories or files to skip. " "This option is repeatable.", + ) + parser.add_argument( + "-c", + "--config-file", + dest="config_file", + help="Specify configuration file to use. By default it will look for '.ansible-lint' or '.config/ansible-lint.yml'", + ) + parser.add_argument( + "--offline", + dest="offline", + action="store_const", + const=True, + help="Disable installation of requirements.yml and schema refreshing", + ) + parser.add_argument( + "--version", + action="store_true", + ) + parser.add_argument( + dest="lintables", + nargs="*", + action="extend", + help="One or more files or paths. When missing it will enable auto-detection mode.", + ) + + return parser + + +def merge_config(file_config: dict[Any, Any], cli_config: Namespace) -> Namespace: + """Combine the file config with the CLI args.""" + bools = ( + "display_relative_path", + "parseable", + "quiet", + "strict", + "use_default_rules", + "progressive", + "offline", + ) + # maps lists to their default config values + lists_map = { + "exclude_paths": [".cache", ".git", ".hg", ".svn", ".tox"], + "rulesdir": [], + "skip_list": [], + "tags": [], + "warn_list": DEFAULT_WARN_LIST, + "mock_modules": [], + "mock_roles": [], + "enable_list": [], + "only_builtins_allow_collections": [], + "only_builtins_allow_modules": [], + # do not include "write_list" here. See special logic below. 
+ } + + scalar_map = { + "loop_var_prefix": None, + "project_dir": ".", + "profile": None, + "sarif_file": None, + } + + if not file_config: + # use defaults if we don't have a config file and the commandline + # parameter is not set + for entry, default in lists_map.items(): + if not getattr(cli_config, entry, None): + setattr(cli_config, entry, default) + return cli_config + + for entry in bools: + file_value = file_config.pop(entry, False) + v = getattr(cli_config, entry) or file_value + setattr(cli_config, entry, v) + + for entry, default in scalar_map.items(): + file_value = file_config.pop(entry, default) + v = getattr(cli_config, entry, None) or file_value + setattr(cli_config, entry, v) + + # if either commandline parameter or config file option is set merge + # with the other, if neither is set use the default + for entry, default in lists_map.items(): + if getattr(cli_config, entry, None) or entry in file_config.keys(): + value = getattr(cli_config, entry, []) + value.extend(file_config.pop(entry, [])) + else: + value = default + setattr(cli_config, entry, value) + + # "write_list" config has special merge rules + entry = "write_list" + setattr( + cli_config, + entry, + WriteArgAction.merge_write_list_config( + from_file=file_config.pop(entry, []), + from_cli=getattr(cli_config, entry, []) or [], + ), + ) + + if "verbosity" in file_config: + cli_config.verbosity = cli_config.verbosity + file_config.pop("verbosity") + + # merge options that can be set only via a file config + for entry, value in file_config.items(): + setattr(cli_config, entry, value) + + # append default kinds to the custom list + kinds = file_config.get("kinds", []) + kinds.extend(DEFAULT_KINDS) + setattr(cli_config, "kinds", kinds) + + return cli_config + + +def get_config(arguments: list[str]) -> Namespace: + """Extract the config based on given args.""" + parser = get_cli_parser() + options = parser.parse_args(arguments) + + # docs is not document, being used for internal documentation building + if options.list_rules and options.format not in [ + None, + "brief", + "full", + "md", + "docs", + ]: + parser.error( + f"argument -f: invalid choice: '{options.format}'. " + f"In combination with argument -L only 'brief', " + f"'rich' or 'md' are supported with -f." + ) + + # save info about custom config file, as options.config_file may be modified by merge_config + has_custom_config = not options.config_file + + file_config = load_config(options.config_file) + + config = merge_config(file_config, options) + + options.rulesdirs = get_rules_dirs(options.rulesdir, options.use_default_rules) + + if has_custom_config and options.project_dir == ".": + project_dir = guess_project_dir(options.config_file) + options.project_dir = os.path.expanduser(normpath(project_dir)) + + if not options.project_dir or not os.path.exists(options.project_dir): + raise RuntimeError( + f"Failed to determine a valid project_dir: {options.project_dir}" + ) + + # Compute final verbosity level by subtracting -q counter. 
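# (Illustration: both -v and -q are counters, so 'ansible-lint -vv -q ...' ends up with
# verbosity 2 - 1 = 1, while '-qq' alone yields -2.)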
+ options.verbosity -= options.quiet + return config + + +def print_help(file: Any = sys.stdout) -> None: + """Print help test to the given stream.""" + get_cli_parser().print_help(file=file) + + +def get_rules_dirs(rulesdir: list[str], use_default: bool = True) -> list[str]: + """Return a list of rules dirs.""" + default_ruledirs = [DEFAULT_RULESDIR] + default_custom_rulesdir = os.environ.get( + CUSTOM_RULESDIR_ENVVAR, os.path.join(DEFAULT_RULESDIR, "custom") + ) + custom_ruledirs = sorted( + str(x.resolve()) + for x in Path(default_custom_rulesdir).iterdir() + if x.is_dir() and (x / "__init__.py").exists() + ) + + if use_default: + return rulesdir + custom_ruledirs + default_ruledirs + + return rulesdir or custom_ruledirs + default_ruledirs diff --git a/src/ansiblelint/color.py b/src/ansiblelint/color.py new file mode 100644 index 0000000..373ef78 --- /dev/null +++ b/src/ansiblelint/color.py @@ -0,0 +1,108 @@ +"""Console coloring and terminal support.""" +from __future__ import annotations + +from typing import Any + +import rich +import rich.markdown +from rich.console import Console +from rich.default_styles import DEFAULT_STYLES +from rich.style import Style +from rich.syntax import Syntax +from rich.theme import Theme + +# WARNING: When making style changes, be sure you test the output of +# `ansible-lint -L` on multiple terminals with dark/light themes, including: +# - iTerm2 (macOS) - bold might not be rendered differently +# - vscode integrated terminal - bold might not be rendered differently, links will not work +# +# When it comes to colors being used, try to match: +# - Ansible official documentation theme, https://docs.ansible.com/ansible/latest/dev_guide/developing_api.html +# - VSCode Ansible extension for syntax highlighting +# - GitHub markdown theme +# +# Current values: (docs) +# codeblock border: #404040 +# codeblock background: #edf0f2 +# codeblock comment: #6a737d (also italic) +# teletype-text: #e74c3c (red) +# teletype-text-border: 1px solid #e1e4e5 (background white) +# text: #404040 +# codeblock other-text: #555555 (black-ish) +# codeblock property: #22863a (green) +# codeblock integer: 032f62 (blue) +# codeblock command: #0086b3 (blue) - [shell] +# == python == +# class: #445588 (dark blue and bold) +# docstring: #dd1144 (red) +# self: #999999 (light-gray) +# method/function: #990000 (dark-red) +# number: #009999 cyan +# keywords (def,None,False,len,from,import): #007020 (green) bold +# super|dict|print: #0086b3 light-blue +# __name__: #bb60d5 (magenta) +# string: #dd1144 (light-red) +DEFAULT_STYLES.update( + { + # "code": Style(color="bright_black", bgcolor="red"), + "markdown.code": Style(color="bright_black"), + "markdown.code_block": Style(dim=True, color="cyan"), + } +) + + +_theme = Theme( + { + "info": "cyan", + "warning": "yellow", + "danger": "bold red", + "title": "yellow", + "error": "bright_red", + "filename": "blue", + } +) +console_options: dict[str, Any] = {"emoji": False, "theme": _theme, "soft_wrap": True} +console_options_stderr = console_options.copy() +console_options_stderr["stderr"] = True + +console = rich.get_console() +console_stderr = Console(**console_options_stderr) + + +def reconfigure(new_options: dict[str, Any]) -> None: + """Reconfigure console options.""" + global console_options # pylint: disable=global-statement,invalid-name + global console_stderr # pylint: disable=global-statement,invalid-name,global-variable-not-assigned + + console_options = new_options + rich.reconfigure(**new_options) + # see 
https://github.com/willmcgugan/rich/discussions/484#discussioncomment-200182 + new_console_options_stderr = console_options.copy() + new_console_options_stderr["stderr"] = True + tmp_console = Console(**new_console_options_stderr) + console_stderr.__dict__ = tmp_console.__dict__ + + +def render_yaml(text: str) -> Syntax: + """Colorize YAMl for nice display.""" + return Syntax(text, "yaml", theme="ansi_dark") + + +# pylint: disable=redefined-outer-name,unused-argument +def _rich_codeblock_custom_rich_console( + self: rich.markdown.CodeBlock, + console: Console, + options: rich.console.ConsoleOptions, +) -> rich.console.RenderResult: # pragma: no cover + code = str(self.text).rstrip() + syntax = Syntax( + code, + self.lexer_name, + theme=self.theme, + word_wrap=True, + background_color="default", + ) + yield syntax + + +rich.markdown.CodeBlock.__rich_console__ = _rich_codeblock_custom_rich_console # type: ignore diff --git a/src/ansiblelint/config.py b/src/ansiblelint/config.py new file mode 100644 index 0000000..c9262c9 --- /dev/null +++ b/src/ansiblelint/config.py @@ -0,0 +1,266 @@ +"""Store configuration options as a singleton.""" +from __future__ import annotations + +import json +import logging +import os +import sys +import time +import urllib.request +import warnings +from argparse import Namespace +from functools import lru_cache +from pathlib import Path +from typing import Any +from urllib.error import HTTPError, URLError + +from packaging.version import Version + +from ansiblelint import __version__ +from ansiblelint.loaders import yaml_from_file + +_logger = logging.getLogger(__name__) + + +CACHE_DIR = ( + os.path.expanduser(os.environ.get("XDG_CACHE_HOME", "~/.cache")) + "/ansible-lint" +) + +DEFAULT_WARN_LIST = [ + "experimental", + "jinja[spacing]", # warning until we resolve all reported false-positives +] + +DEFAULT_KINDS = [ + # Do not sort this list, order matters. 
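# (Illustration: matching is first-hit-wins in kind_from_path() further below, so a
# hypothetical 'playbooks/site.yml' is classified as 'playbook' by the
# '**/playbooks/*.{yml,yaml}' entry before the generic '**/*.{yaml,yml}' fallback could
# label it plain 'yaml'.)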
+ {"jinja2": "**/*.j2"}, # jinja2 templates are not always parsable as something else + {"jinja2": "**/*.j2.*"}, + {"yaml": ".github/**/*.{yaml,yml}"}, # github workflows + {"text": "**/templates/**/*.*"}, # templates are likely not validable + {"execution-environment": "**/execution-environment.yml"}, + {"ansible-lint-config": "**/.ansible-lint"}, + {"ansible-lint-config": "**/.config/ansible-lint.yml"}, + {"ansible-navigator-config": "**/ansible-navigator.{yaml,yml}"}, + {"inventory": "**/inventory/**.{yaml,yml}"}, + {"requirements": "**/meta/requirements.{yaml,yml}"}, # v1 only + # https://docs.ansible.com/ansible/latest/dev_guide/collections_galaxy_meta.html + {"galaxy": "**/galaxy.yml"}, # Galaxy collection meta + {"reno": "**/releasenotes/*/*.{yaml,yml}"}, # reno release notes + {"tasks": "**/tasks/**/*.{yaml,yml}"}, + {"rulebook": "**/rulebooks/*.{yml,yaml"}, + {"playbook": "**/playbooks/*.{yml,yaml}"}, + {"playbook": "**/*playbook*.{yml,yaml}"}, + {"role": "**/roles/*/"}, + {"handlers": "**/handlers/*.{yaml,yml}"}, + {"vars": "**/{host_vars,group_vars,vars,defaults}/**/*.{yaml,yml}"}, + {"test-meta": "**/tests/integration/targets/*/meta/main.{yaml,yml}"}, + {"meta": "**/meta/main.{yaml,yml}"}, + {"meta-runtime": "**/meta/runtime.{yaml,yml}"}, + {"arg_specs": "**/meta/argument_specs.{yaml,yml}"}, # role argument specs + {"yaml": ".config/molecule/config.{yaml,yml}"}, # molecule global config + { + "requirements": "**/molecule/*/{collections,requirements}.{yaml,yml}" + }, # molecule old collection requirements (v1), ansible 2.8 only + {"yaml": "**/molecule/*/{base,molecule}.{yaml,yml}"}, # molecule config + {"requirements": "**/requirements.{yaml,yml}"}, # v2 and v1 + {"playbook": "**/molecule/*/*.{yaml,yml}"}, # molecule playbooks + {"yaml": "**/{.ansible-lint,.yamllint}"}, + {"changelog": "**/changelogs/changelog.yaml"}, + {"yaml": "**/*.{yaml,yml}"}, + {"yaml": "**/.*.{yaml,yml}"}, +] + +BASE_KINDS = [ + # These assignations are only for internal use and are only inspired by + # MIME/IANA model. Their purpose is to be able to process a file based on + # it type, including generic processing of text files using the prefix. 
+ { + "text/jinja2": "**/*.j2" + }, # jinja2 templates are not always parsable as something else + {"text/jinja2": "**/*.j2.*"}, + {"text": "**/templates/**/*.*"}, # templates are likely not validable + {"text/json": "**/*.json"}, # standardized + {"text/markdown": "**/*.md"}, # https://tools.ietf.org/html/rfc7763 + {"text/rst": "**/*.rst"}, # https://en.wikipedia.org/wiki/ReStructuredText + {"text/ini": "**/*.ini"}, + # YAML has no official IANA assignation + {"text/yaml": "**/{.ansible-lint,.yamllint}"}, + {"text/yaml": "**/*.{yaml,yml}"}, + {"text/yaml": "**/.*.{yaml,yml}"}, +] + +PROFILES = yaml_from_file(Path(__file__).parent / "data" / "profiles.yml") + +LOOP_VAR_PREFIX = "^(__|{role}_)" + +options = Namespace( + cache_dir=None, + colored=True, + configured=False, + cwd=".", + display_relative_path=True, + exclude_paths=[], + format="brief", + lintables=[], + list_rules=False, + list_tags=False, + write_list=[], + parseable=False, + quiet=False, + rulesdirs=[], + skip_list=[], + tags=[], + verbosity=False, + warn_list=[], + kinds=DEFAULT_KINDS, + mock_filters=[], + mock_modules=[], + mock_roles=[], + loop_var_prefix=None, + only_builtins_allow_collections=[], + only_builtins_allow_modules=[], + var_naming_pattern=None, + offline=False, + project_dir=".", # default should be valid folder (do not use None here) + extra_vars=None, + enable_list=[], + skip_action_validation=True, + strict=False, + rules={}, # Placeholder to set and keep configurations for each rule. + profile=None, + task_name_prefix="{stem} | ", + sarif_file=None, +) + +# Used to store detected tag deprecations +used_old_tags: dict[str, str] = {} + +# Used to store collection list paths (with mock paths if needed) +collection_list: list[str] = [] + + +def get_rule_config(rule_id: str) -> dict[str, Any]: + """Get configurations for the rule ``rule_id``.""" + rule_config = options.rules.get(rule_id, {}) + if not isinstance(rule_config, dict): # pragma: no branch + raise RuntimeError(f"Invalid rule config for {rule_id}: {rule_config}") + return rule_config + + +@lru_cache +def ansible_collections_path() -> str: + """Return collection path variable for current version of Ansible.""" + # respect Ansible behavior, which is to load old name if present + for env_var in [ + "ANSIBLE_COLLECTIONS_PATHS", + "ANSIBLE_COLLECTIONS_PATH", + ]: # pragma: no cover + if env_var in os.environ: + return env_var + return "ANSIBLE_COLLECTIONS_PATH" + + +def in_venv() -> bool: + """Determine whether Python is running from a venv.""" + if hasattr(sys, "real_prefix") or os.environ.get("CONDA_EXE", None) is not None: + return True + + pfx = getattr(sys, "base_prefix", sys.prefix) + return pfx != sys.prefix + + +def guess_install_method() -> str: + """Guess if pip upgrade command should be used.""" + package_name = "ansible-lint" + pip = "" + if in_venv(): + _logger.debug("Found virtualenv, assuming `pip3 install` will work.") + pip = f"pip install --upgrade {package_name}" + elif __file__.startswith(os.path.expanduser("~/.local/lib")): + _logger.debug( + "Found --user installation, assuming `pip3 install --user` will work." + ) + pip = f"pip3 install --user --upgrade {package_name}" + + # By default we assume pip is not safe to be used + use_pip = False + try: + # Use pip to detect if is safe to use it to upgrade the package. + # We do imports here to for performance and reasons, and also in order + # to avoid errors if pip internals change. Also we want to avoid having + # to add pip as a dependency, so we make use of it only when present. 
+ + # trick to avoid runtime warning from inside pip: _distutils_hack/__init__.py:33: UserWarning: Setuptools is replacing distutils. + with warnings.catch_warnings(record=True): + warnings.simplefilter("always") + # pylint: disable=import-outside-toplevel + from pip._internal.metadata import get_default_environment + from pip._internal.req.req_uninstall import uninstallation_paths + + dist = get_default_environment().get_distribution(package_name) + if dist: + logging.debug("Found %s dist", dist) + for _ in uninstallation_paths(dist): + use_pip = True + else: + logging.debug("Skipping %s as it is not installed.", package_name) + use_pip = False + # pylint: disable=broad-except + except Exception as exc: + # On Fedora 36, we got a AttributeError exception from pip that we want to avoid + logging.debug(exc) + use_pip = False + + # We only want to recommend pip for upgrade if it looks safe to do so. + return pip if use_pip else "" + + +def get_version_warning() -> str: + """Display warning if current version is outdated.""" + # 0.1dev1 is special fallback version + if __version__ == "0.1.dev1": # pragma: no cover + return "" + + msg = "" + data = {} + current_version = Version(__version__) + + if not os.path.exists(CACHE_DIR): # pragma: no cover + os.makedirs(CACHE_DIR) + cache_file = f"{CACHE_DIR}/latest.json" + refresh = True + if os.path.exists(cache_file): + age = time.time() - os.path.getmtime(cache_file) + if age < 24 * 60 * 60: + refresh = False + with open(cache_file, encoding="utf-8") as f: + data = json.load(f) + + if refresh or not data: + release_url = ( + "https://api.github.com/repos/ansible/ansible-lint/releases/latest" + ) + try: + with urllib.request.urlopen(release_url) as url: + data = json.load(url) + with open(cache_file, "w", encoding="utf-8") as f: + json.dump(data, f) + except (URLError, HTTPError) as exc: # pragma: no cover + _logger.debug( + "Unable to fetch latest version from %s due to: %s", release_url, exc + ) + return "" + + html_url = data["html_url"] + new_version = Version(data["tag_name"][1:]) # removing v prefix from tag + + if current_version > new_version: + msg = "[dim]You are using a pre-release version of ansible-lint.[/]" + elif current_version < new_version: + msg = f"""[warning]A new release of ansible-lint is available: [red]{current_version}[/] → [green][link={html_url}]{new_version}[/][/][/]""" + + pip = guess_install_method() + if pip: + msg += f" Upgrade by running: [info]{pip}[/]" + + return msg diff --git a/src/ansiblelint/constants.py b/src/ansiblelint/constants.py new file mode 100644 index 0000000..0fec8fd --- /dev/null +++ b/src/ansiblelint/constants.py @@ -0,0 +1,168 @@ +"""Constants used by AnsibleLint.""" +import os.path +from enum import Enum +from typing import Literal + +DEFAULT_RULESDIR = os.path.join(os.path.dirname(__file__), "rules") +CUSTOM_RULESDIR_ENVVAR = "ANSIBLE_LINT_CUSTOM_RULESDIR" +RULE_DOC_URL = "https://ansible-lint.readthedocs.io/rules/" + +SUCCESS_RC = 0 +VIOLATIONS_FOUND_RC = 2 +INVALID_CONFIG_RC = 3 +LOCK_TIMEOUT_RC = 4 +EXIT_CONTROL_C_RC = 130 + +# Minimal version of Ansible we support for runtime +ANSIBLE_MIN_VERSION = "2.12" + +ANSIBLE_MOCKED_MODULE = """\ +# This is a mocked Ansible module generated by ansible-lint +from ansible.module_utils.basic import AnsibleModule + +DOCUMENTATION = ''' +module: {name} + +short_description: Mocked +version_added: "1.0.0" +description: Mocked + +author: + - ansible-lint (@nobody) +''' +EXAMPLES = '''mocked''' +RETURN = '''mocked''' + + +def main(): + result = dict( + 
changed=False, + original_message='', + message='') + + module = AnsibleModule( + argument_spec=dict(), + supports_check_mode=True, + ) + module.exit_json(**result) + + +if __name__ == "__main__": + main() +""" + +FileType = Literal[ + "playbook", + "rulebook", + "meta", # role meta + "meta-runtime", + "tasks", # includes pre_tasks, post_tasks + "handlers", # very similar to tasks but with some specifics + # https://docs.ansible.com/ansible/latest/galaxy/user_guide.html#installing-roles-and-collections-from-the-same-requirements-yml-file + "requirements", + "role", # that is a folder! + "yaml", # generic yaml file, previously reported as unknown file type + "ansible-lint-config", + "", # unknown file type +] + + +# Aliases for deprecated tags/ids and their newer names +RENAMED_TAGS = { + "102": "no-jinja-when", + "104": "deprecated-bare-vars", + "105": "deprecated-module", + "106": "role-name", + "202": "risky-octal", + "203": "no-tabs", + "205": "playbook-extension", + "206": "jinja[spacing]", + "207": "jinja[invalid]", + "208": "risky-file-permissions", + "301": "no-changed-when", + "302": "deprecated-command-syntax", + "303": "command-instead-of-module", + "304": "inline-env-var", + "305": "command-instead-of-shell", + "306": "risky-shell-pipe", + "401": "latest[git]", + "402": "latest[hg]", + "403": "package-latest", + "404": "no-relative-paths", + "501": "partial-become", + "502": "unnamed-task", + "503": "no-handler", + "504": "deprecated-local-action", + "505": "missing-import", + "601": "literal-compare", + "602": "empty-string-compare", + "701": "meta-no-info", + "702": "meta-no-tags", + "703": "meta-incorrect", + "704": "meta-video-links", + "911": "syntax-check", + "var-spacing": "jinja[spacing]", + "unnamed-task": "name[missing]", + "git-latest": "latest[git]", + "hg-latest": "latest[hg]", + "no-jinja-nesting": "jinja[invalid]", + "no-loop-var-prefix": "loop-var-prefix", + "fqcn-builtins": "fqcn[action-core]", +} + +PLAYBOOK_TASK_KEYWORDS = [ + "tasks", + "handlers", + "pre_tasks", + "post_tasks", +] +NESTED_TASK_KEYS = [ + "block", + "always", + "rescue", +] + +# Keys that are used internally when parsing YAML/JSON files +SKIPPED_RULES_KEY = "__skipped_rules__" +LINE_NUMBER_KEY = "__line__" +FILENAME_KEY = "__file__" +ANNOTATION_KEYS = [FILENAME_KEY, LINE_NUMBER_KEY, SKIPPED_RULES_KEY] + +INCLUSION_ACTION_NAMES = { + "include", + "include_tasks", + "import_playbook", + "import_tasks", + "ansible.builtin.include", + "ansible.builtin.include_tasks", + "ansible.builtin.import_playbook", + "ansible.builtin.import_tasks", +} + +ROLE_IMPORT_ACTION_NAMES = { + "ansible.builtin.import_role", + "ansible.builtin.include_role", + "ansible.legacy.import_role", + "ansible.legacy.include_role", + "import_role", + "include_role", +} + +# Newer versions of git might fail to run when different file ownership is +# found of repo. One example is on GHA runners executing containerized +# reusable actions, where the mounted volume might have different owner. 
+# +# https://github.com/ansible/ansible-lint-action/issues/138 +GIT_CMD = ["git", "-c", f"safe.directory={os.getcwd()}"] + + +class States(Enum): + """States used are used as sentinel values in various places.""" + + NOT_LOADED = "File not loaded" + LOAD_FAILED = "File failed to load" + UNKNOWN_DATA = "Unknown data" + + def __bool__(self) -> bool: + """Ensure all states evaluate as False as booleans.""" + return False diff --git a/src/ansiblelint/data/profiles.yml b/src/ansiblelint/data/profiles.yml new file mode 100644 index 0000000..8de92fd --- /dev/null +++ b/src/ansiblelint/data/profiles.yml @@ -0,0 +1,121 @@ +--- +# Do not change sorting order of the primary keys as they also represent how +# progressive the profiles are, each one extending the one before it. +min: + description: > + The `min` profile ensures that Ansible can load content. + Rules in this profile are mandatory because they prevent fatal errors. + You can add files to the exclude list or provide dependencies to load the + correct files. + extends: null + rules: + internal-error: + load-failure: + parser-error: + syntax-check: +basic: + description: > + The `basic` profile prevents common coding issues and enforces standard styles and formatting. + extends: min + rules: + command-instead-of-module: + command-instead-of-shell: + deprecated-bare-vars: + deprecated-command-syntax: + deprecated-local-action: + deprecated-module: + inline-env-var: + key-order: + literal-compare: + jinja: + no-jinja-when: + no-tabs: + partial-become: + playbook-extension: + role-name: + schema: # can cover lots of rules, but not really be able to give best error messages + name: + var-naming: + yaml: + skip_list: # just because we enable them in following profiles + - name[template] + - name[casing] +moderate: + description: > + The `moderate` profile ensures that content adheres to best practices for making content easier to read and maintain. + extends: basic + rules: + name[template]: + name[imperative]: + url: https://github.com/ansible/ansible-lint/issues/2170 + name[casing]: + no-free-form: # schema-related + url: https://github.com/ansible/ansible-lint/issues/2117 + spell-var-name: + url: https://github.com/ansible/ansible-lint/issues/2168 +safety: + description: > + The `safety` profile avoids module calls that can have non-determinant outcomes or security concerns. + extends: moderate + rules: + avoid-implicit: + latest: + package-latest: + risky-file-permissions: + risky-octal: + risky-shell-pipe: +shared: + description: > + The `shared` profile ensures that content follows best practices for packaging and publishing. + This profile is intended for content creators who want to make Ansible + playbooks, roles, or collections available from + [galaxy.ansible.com](https://galaxy.ansible.com/), + [automation-hub](https://console.redhat.com/ansible/automation-hub), + or a private instance. 
+ extends: safety + rules: + galaxy: # <-- applies to both galaxy and automation-hub + ignore-errors: + layout: + url: https://github.com/ansible/ansible-lint/issues/1900 + meta-incorrect: + meta-no-info: + meta-no-tags: + meta-video-links: + meta-version: + url: https://github.com/ansible/ansible-lint/issues/2103 + meta-runtime: + url: https://github.com/ansible/ansible-lint/issues/2102 + no-changed-when: + no-changelog: + url: https://github.com/ansible/ansible-lint/issues/2101 + no-handler: + no-relative-paths: + max-block-depth: + url: https://github.com/ansible/ansible-lint/issues/2173 + max-tasks: + url: https://github.com/ansible/ansible-lint/issues/2172 + unsafe-loop: + # unsafe-loop[prefix] (currently named "no-var-prefix") + # [unsafe-loop[var-prefix|iterator]] + url: https://github.com/ansible/ansible-lint/issues/2038 +production: + description: > + The `production` profile ensures that content meets requirements for + inclusion in [Ansible Automation Platform (AAP)](https://www.redhat.com/en/technologies/management/ansible) + as validated or certified content. + extends: shared + rules: + avoid-dot-notation: + url: https://github.com/ansible/ansible-lint/issues/2174 + disallowed-ignore: # [sanity] + url: https://github.com/ansible/ansible-lint/issues/2121 + fqcn: + import-task-no-when: + url: https://github.com/ansible/ansible-lint/issues/2219 + meta-no-dependencies: + url: https://github.com/ansible/ansible-lint/issues/2159 + single-entry-point: + url: https://github.com/ansible/ansible-lint/issues/2242 + use-loop: + url: https://github.com/ansible/ansible-lint/issues/2204 diff --git a/src/ansiblelint/errors.py b/src/ansiblelint/errors.py new file mode 100644 index 0000000..c5a1895 --- /dev/null +++ b/src/ansiblelint/errors.py @@ -0,0 +1,144 @@ +"""Exceptions and error representations.""" +from __future__ import annotations + +import functools +from typing import Any + +from ansiblelint._internal.rules import BaseRule, RuntimeErrorRule +from ansiblelint.config import options +from ansiblelint.file_utils import Lintable, normpath + + +# pylint: disable=too-many-instance-attributes +@functools.total_ordering +class MatchError(ValueError): + """Rule violation detected during linting. + + It can be raised as Exception but also just added to the list of found + rules violations. + + Note that line argument is not considered when building hash of an + instance. + """ + + tag = "" + + # IMPORTANT: any additional comparison protocol methods must return + # IMPORTANT: `NotImplemented` singleton to allow the check to use the + # IMPORTANT: other object's fallbacks. + # Ref: https://docs.python.org/3/reference/datamodel.html#object.__lt__ + + # pylint: disable=too-many-arguments + def __init__( + self, + message: str | None = None, + # most linters report use (1,1) base, including yamllint and flake8 + # we should never report line 0 or column 0 in output. 
+ linenumber: int = 1, + column: int | None = None, + details: str = "", + filename: Lintable | None = None, + rule: BaseRule = RuntimeErrorRule(), + tag: str | None = None, # optional fine-graded tag + ) -> None: + """Initialize a MatchError instance.""" + super().__init__(message) + + if rule.__class__ is RuntimeErrorRule and not message: + raise TypeError( + f"{self.__class__.__name__}() missing a " + "required argument: one of 'message' or 'rule'", + ) + + self.message = str(message or getattr(rule, "shortdesc", "")) + + # Safety measure to ensure we do not end-up with incorrect indexes + if linenumber == 0: # pragma: no cover + raise RuntimeError( + "MatchError called incorrectly as line numbers start with 1" + ) + if column == 0: # pragma: no cover + raise RuntimeError( + "MatchError called incorrectly as column numbers start with 1" + ) + + self.linenumber = linenumber + self.column = column + self.details = details + self.filename = "" + if filename: + if isinstance(filename, Lintable): + self.lintable = filename + self.filename = normpath(str(filename.path)) + else: + self.filename = normpath(filename) + self.lintable = Lintable(self.filename) + self.rule = rule + self.ignored = False # If set it will be displayed but not counted as failure + # This can be used by rules that can report multiple errors type, so + # we can still filter by them. + self.tag = tag or rule.id + + # optional indicator on how this error was found + self.match_type: str | None = None + # for task matches, save the normalized task object (useful for transforms) + self.task: dict[str, Any] | None = None + # path to the problem area, like: [0,"pre_tasks",3] for [0].pre_tasks[3] + self.yaml_path: list[int | str] = [] + # True when a transform has resolved this MatchError + self.fixed = False + + @functools.cached_property + def level(self) -> str: + """Return the level of the rule: error, warning or notice.""" + if {self.tag, self.rule.id, *self.rule.tags}.isdisjoint(options.warn_list): + return "error" + return "warning" + + def __repr__(self) -> str: + """Return a MatchError instance representation.""" + formatstr = "[{0}] ({1}) matched {2}:{3} {4}" + # note that `rule.id` can be int, str or even missing, as users + # can defined their own custom rules. + _id = getattr(self.rule, "id", "000") + + return formatstr.format( + _id, self.message, self.filename, self.linenumber, self.details + ) + + @property + def position(self) -> str: + """Return error positioning, with column number if available.""" + if self.column: + return f"{self.linenumber}:{self.column}" + return str(self.linenumber) + + @property + def _hash_key(self) -> Any: + # line attr is knowingly excluded, as dict is not hashable + return ( + self.filename, + self.linenumber, + str(getattr(self.rule, "id", 0)), + self.message, + self.details, + # -1 is used here to force errors with no column to sort before + # all other errors. 
+ -1 if self.column is None else self.column, + ) + + def __lt__(self, other: object) -> bool: + """Return whether the current object is less than the other.""" + if not isinstance(other, self.__class__): + return NotImplemented + return bool(self._hash_key < other._hash_key) + + def __hash__(self) -> int: + """Return a hash value of the MatchError instance.""" + return hash(self._hash_key) + + def __eq__(self, other: object) -> bool: + """Identify whether the other object represents the same rule match.""" + if not isinstance(other, self.__class__): + return NotImplemented + return self.__hash__() == other.__hash__() diff --git a/src/ansiblelint/file_utils.py b/src/ansiblelint/file_utils.py new file mode 100644 index 0000000..500dff7 --- /dev/null +++ b/src/ansiblelint/file_utils.py @@ -0,0 +1,523 @@ +"""Utility functions related to file operations.""" +from __future__ import annotations + +import copy +import logging +import os +import pathlib +import subprocess +import sys +from argparse import Namespace +from collections import OrderedDict, defaultdict +from contextlib import contextmanager +from pathlib import Path +from tempfile import NamedTemporaryFile +from typing import TYPE_CHECKING, Any, Iterator, cast + +import wcmatch.pathlib +from wcmatch.wcmatch import RECURSIVE, WcMatch +from yaml.error import YAMLError + +from ansiblelint.config import BASE_KINDS, options +from ansiblelint.constants import GIT_CMD, FileType, States + +if TYPE_CHECKING: + # https://github.com/PyCQA/pylint/issues/3979 + BasePathLike = os.PathLike[Any] # pylint: disable=unsubscriptable-object +else: + BasePathLike = os.PathLike + +_logger = logging.getLogger(__package__) + + +def abspath(path: str, base_dir: str) -> str: + """Make relative path absolute relative to given directory. + + Args: + path (str): the path to make absolute + base_dir (str): the directory from which make \ + relative paths absolute + """ + if not os.path.isabs(path): + # Don't use abspath as it assumes path is relative to cwd. + # We want it relative to base_dir. + path = os.path.join(base_dir, path) + + return os.path.normpath(path) + + +def normpath(path: str | BasePathLike) -> str: + """ + Normalize a path in order to provide a more consistent output. + + Currently it generates a relative path but in the future we may want to + make this user configurable. + """ + # prevent possible ValueError with relpath(), when input is an empty string + if not path: + path = "." + # conversion to string in order to allow receiving non string objects + relpath = os.path.relpath(str(path)) + path_absolute = os.path.abspath(str(path)) + if path_absolute.startswith(os.getcwd()): + return relpath + if path_absolute.startswith(os.path.expanduser("~")): + return path_absolute.replace(os.path.expanduser("~"), "~") + # we avoid returning relative paths that end-up at root level + if path_absolute in relpath: + return path_absolute + if relpath.startswith("../"): + return path_absolute + return relpath + + +# That is needed for compatibility with py38, later was added to Path class +def is_relative_to(path: Path, *other: Any) -> bool: + """Return True if the path is relative to another path or False.""" + try: + path.resolve().absolute().relative_to(*other) + return True + except ValueError: + return False + + +def normpath_path(path: str | BasePathLike) -> Path: + """Normalize a path in order to provide a more consistent output. + + - Any symlinks are resolved. + - Any paths outside the CWD are resolved to their absolute path. 
+ - Any absolute path within current user home directory is compressed to + make use of '~', so it is easier to read and more portable. + """ + if not isinstance(path, Path): + path = Path(path) + + is_relative = is_relative_to(path, path.cwd()) + path = path.resolve() + if is_relative: + path = path.relative_to(path.cwd()) + + # Compress any absolute path within current user home directory + if path.is_absolute(): + home = Path.home() + if is_relative_to(path, home): + path = Path("~") / path.relative_to(home) + + return path + + +@contextmanager +def cwd(path: str | BasePathLike) -> Iterator[None]: + """Context manager for temporary changing current working directory.""" + old_pwd = os.getcwd() + os.chdir(path) + try: + yield + finally: + os.chdir(old_pwd) + + +def expand_path_vars(path: str) -> str: + """Expand the environment or ~ variables in a path string.""" + # It may be possible for function to be called with a Path object + path = str(path).strip() + path = os.path.expanduser(path) + path = os.path.expandvars(path) + return path + + +def expand_paths_vars(paths: list[str]) -> list[str]: + """Expand the environment or ~ variables in a list.""" + paths = [expand_path_vars(p) for p in paths] + return paths + + +def kind_from_path(path: Path, base: bool = False) -> FileType: + """Determine the file kind based on its name. + + When called with base=True, it will return the base file type instead + of the explicit one. That is expected to return 'yaml' for any yaml files. + """ + # pathlib.Path.match patterns are very limited, they do not support *a*.yml + # glob.glob supports **/foo.yml but not multiple extensions + pathex = wcmatch.pathlib.PurePath(str(path.absolute().resolve())) + kinds = options.kinds if not base else BASE_KINDS + for entry in kinds: + for k, v in entry.items(): + if pathex.globmatch( + v, + flags=( + wcmatch.pathlib.GLOBSTAR + | wcmatch.pathlib.BRACE + | wcmatch.pathlib.DOTGLOB + ), + ): + return str(k) # type: ignore + + if base: + # Unknown base file type is default + return "" + + if path.is_dir(): + return "role" + + if str(path) == "/dev/stdin": + return "playbook" + + # Unknown file types report a empty string (evaluated as False) + return "" + + +# pylint: disable=too-many-instance-attributes +class Lintable: + """Defines a file/folder that can be linted. + + Providing file content when creating the object allow creation of in-memory + instances that do not need files to be present on disk. + + When symlinks are given, they will always be resolved to their target. 
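+
+    A minimal usage sketch (assumes a ``site.yml`` playbook exists in the
+    current directory; the detected kind depends on the configured file
+    patterns):
+
+    .. code:: python
+
+        lintable = Lintable("site.yml")
+        lintable.kind     # usually "playbook" for such a file
+        lintable.content  # file content, read lazily from disk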
+ """ + + def __init__( + self, + name: str | Path, + content: str | None = None, + kind: FileType | None = None, + base_kind: str = "", + ): + """Create a Lintable instance.""" + self.dir: str = "" + self.kind: FileType | None = None + self.stop_processing = False # Set to stop other rules from running + self._data: Any = States.NOT_LOADED + self.line_skips: dict[int, set[str]] = defaultdict(set) + self.exc: Exception | None = None # Stores data loading exceptions + + if isinstance(name, str): + name = Path(name) + is_relative = is_relative_to(name, str(name.cwd())) + name = name.resolve() + if is_relative: + name = name.relative_to(name.cwd()) + name = normpath_path(name) + self.path = name + # Filename is effective file on disk, for stdin is a namedtempfile + self.name = self.filename = str(name) + + self._content = self._original_content = content + self.updated = False + + # if the lintable is part of a role, we save role folder name + self.role = "" + parts = self.path.parent.parts + if "roles" in parts: + role = self.path + while role.parent.name != "roles" and role.name: + role = role.parent + if role.exists(): + self.role = role.name + + if str(self.path) in ["/dev/stdin", "-"]: + # pylint: disable=consider-using-with + self.file = NamedTemporaryFile(mode="w+", suffix="playbook.yml") + self.filename = self.file.name + self._content = sys.stdin.read() + self.file.write(self._content) + self.file.flush() + self.path = Path(self.file.name) + self.name = "stdin" + self.kind = "playbook" + self.dir = "/" + else: + self.kind = kind or kind_from_path(self.path) + # We store absolute directory in dir + if not self.dir: + if self.kind == "role": + self.dir = str(self.path.resolve()) + else: + self.dir = str(self.path.parent.resolve()) + + # determine base file kind (yaml, xml, ini, ...) + self.base_kind = base_kind or kind_from_path(self.path, base=True) + self.abspath = self.path.expanduser().absolute() + + if self.kind == "yaml": + self.data # pylint: disable=pointless-statement + + def _guess_kind(self) -> None: + if self.kind == "yaml": + if isinstance(self.data, list) and "hosts" in self.data[0]: + if "rules" not in self.data[0]: + self.kind = "playbook" + else: + self.kind = "rulebook" + # we we failed to guess the more specific kind, we warn user + if self.kind == "yaml": + _logger.debug( + "Passed '%s' positional argument was identified as generic '%s' file kind.", + self.name, + self.kind, + ) + + def __getitem__(self, key: Any) -> Any: + """Provide compatibility subscriptable support.""" + if key == "path": + return str(self.path) + if key == "type": + return str(self.kind) + raise NotImplementedError() + + def get(self, key: Any, default: Any = None) -> Any: + """Provide compatibility subscriptable support.""" + try: + return self[key] + except NotImplementedError: + return default + + def _populate_content_cache_from_disk(self) -> None: + # Can raise UnicodeDecodeError + try: + self._content = self.path.expanduser().resolve().read_text(encoding="utf-8") + except FileNotFoundError as ex: + if vars(options).get("progressive"): + self._content = "" + else: + raise ex + if self._original_content is None: + self._original_content = self._content + + @property + def content(self) -> str: + """Retrieve file content, from internal cache or disk.""" + if self._content is None: + self._populate_content_cache_from_disk() + return cast(str, self._content) + + @content.setter + def content(self, value: str) -> None: + """Update ``content`` and calculate ``updated``. 
+ + To calculate ``updated`` this will read the file from disk if the cache + has not already been populated. + """ + if not isinstance(value, str): + raise TypeError(f"Expected str but got {type(value)}") + if self._original_content is None: + if self._content is not None: + self._original_content = self._content + elif self.path.exists(): + self._populate_content_cache_from_disk() + else: + # new file + self._original_content = "" + self.updated = self._original_content != value + self._content = value + + @content.deleter + def content(self) -> None: + """Reset the internal content cache.""" + self._content = None + + def write(self, force: bool = False) -> None: + """Write the value of ``Lintable.content`` to disk. + + This only writes to disk if the content has been updated (``Lintable.updated``). + For example, you can update the content, and then write it to disk like this: + + .. code:: python + + lintable.content = new_content + lintable.write() + + Use ``force=True`` when you want to force a content rewrite even if the + content has not changed. For example: + + .. code:: python + + lintable.write(force=True) + """ + if not force and not self.updated: + # No changes to write. + return + self.path.expanduser().resolve().write_text( + self._content or "", encoding="utf-8" + ) + + def __hash__(self) -> int: + """Return a hash value of the lintables.""" + return hash((self.name, self.kind, self.abspath)) + + def __eq__(self, other: object) -> bool: + """Identify whether the other object represents the same rule match.""" + if isinstance(other, Lintable): + return bool(self.name == other.name and self.kind == other.kind) + return False + + def __repr__(self) -> str: + """Return user friendly representation of a lintable.""" + return f"{self.name} ({self.kind})" + + @property + def data(self) -> Any: + """Return loaded data representation for current file, if possible.""" + if self._data == States.NOT_LOADED: + if self.path.is_dir(): + self._data = None + return self._data + try: + if str(self.base_kind) == "text/yaml": + from ansiblelint.utils import ( # pylint: disable=import-outside-toplevel + parse_yaml_linenumbers, + ) + + self._data = parse_yaml_linenumbers(self) + # now that _data is not empty, we can try guessing if playbook or rulebook + # it has to be done before append_skipped_rules() call as it's relying + # on self.kind. + if self.kind == "yaml": + self._guess_kind() + # Lazy import to avoid delays and cyclic-imports + if "append_skipped_rules" not in globals(): + # pylint: disable=import-outside-toplevel + from ansiblelint.skip_utils import append_skipped_rules + + self._data = append_skipped_rules(self._data, self) + else: + logging.debug( + "data set to None for %s due to being of %s kind.", + self.path, + self.base_kind, + ) + self._data = States.UNKNOWN_DATA + + except (RuntimeError, FileNotFoundError, YAMLError) as exc: + self._data = States.LOAD_FAILED + self.exc = exc + return self._data + + +# pylint: disable=redefined-outer-name +def discover_lintables(options: Namespace) -> dict[str, Any]: + """Find all files that we know how to lint. + + Return format is normalized, relative for stuff below cwd, ~/ for content + under current user and absolute for everything else. 
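+
+    A rough sketch of the git-based discovery performed below (the WcMatch
+    walk is only a fallback used when git cannot provide the file list):
+
+    .. code:: python
+
+        files = subprocess.check_output(
+            [*GIT_CMD, "ls-files", "--cached", "--others", "--exclude-standard", "-z"],
+            text=True,
+        ).split("\x00")[:-1]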
+ """ + # git is preferred as it also considers .gitignore + git_command_present = [ + *GIT_CMD, + "ls-files", + "--cached", + "--others", + "--exclude-standard", + "-z", + ] + git_command_absent = [*GIT_CMD, "ls-files", "--deleted", "-z"] + out = None + + try: + out_present = subprocess.check_output( + git_command_present, stderr=subprocess.STDOUT, text=True + ).split("\x00")[:-1] + _logger.info( + "Discovered files to lint using: %s", " ".join(git_command_present) + ) + + out_absent = subprocess.check_output( + git_command_absent, stderr=subprocess.STDOUT, text=True + ).split("\x00")[:-1] + _logger.info("Excluded removed files using: %s", " ".join(git_command_absent)) + + out = set(out_present) - set(out_absent) + except subprocess.CalledProcessError as exc: + if not (exc.returncode == 128 and "fatal: not a git repository" in exc.output): + _logger.warning( + "Failed to discover lintable files using git: %s", + exc.output.rstrip("\n"), + ) + except FileNotFoundError as exc: + if options.verbosity: + _logger.warning("Failed to locate command: %s", exc) + + if out is None: + exclude_pattern = "|".join(str(x) for x in options.exclude_paths) + _logger.info("Looking up for files, excluding %s ...", exclude_pattern) + # remove './' prefix from output of WcMatch + out = { + strip_dotslash_prefix(fname) + for fname in WcMatch( + ".", exclude_pattern=exclude_pattern, flags=RECURSIVE, limit=256 + ).match() + } + + return OrderedDict.fromkeys(sorted(out)) + + +def strip_dotslash_prefix(fname: str) -> str: + """Remove ./ leading from filenames.""" + return fname[2:] if fname.startswith("./") else fname + + +def guess_project_dir(config_file: str | None) -> str: + """Return detected project dir or current working directory.""" + path = None + if config_file is not None and config_file != "/dev/null": + target = pathlib.Path(config_file) + if target.exists(): + # for config inside .config, we return the parent dir as project dir + cfg_path = target.parent + if cfg_path.parts[-1] == ".config": + path = str(cfg_path.parent.absolute()) + else: + path = str(cfg_path.absolute()) + + if path is None: + try: + result = subprocess.run( + [*GIT_CMD, "rev-parse", "--show-toplevel"], + capture_output=True, + text=True, + check=True, + ) + + path = result.stdout.splitlines()[0] + except subprocess.CalledProcessError as exc: + if not ( + exc.returncode == 128 and "fatal: not a git repository" in exc.stderr + ): + _logger.warning( + "Failed to guess project directory using git: %s", + exc.stderr.rstrip("\n"), + ) + except FileNotFoundError as exc: + _logger.warning("Failed to locate command: %s", exc) + + if path is None: + path = os.getcwd() + + _logger.info( + "Guessed %s as project root directory", + path, + ) + + return path + + +def expand_dirs_in_lintables(lintables: set[Lintable]) -> None: + """Return all recognized lintables within given directory.""" + should_expand = False + + for item in lintables: + if item.path.is_dir(): + should_expand = True + break + + if should_expand: + # this relies on git and we do not want to call unless needed + all_files = discover_lintables(options) + + for item in copy.copy(lintables): + if item.path.is_dir(): + for filename in all_files: + if filename.startswith(str(item.path)): + lintables.add(Lintable(filename)) diff --git a/src/ansiblelint/formatters/__init__.py b/src/ansiblelint/formatters/__init__.py new file mode 100644 index 0000000..2914f6d --- /dev/null +++ b/src/ansiblelint/formatters/__init__.py @@ -0,0 +1,315 @@ +"""Output formatters.""" +from __future__ import 
annotations + +import hashlib +import json +import os +from pathlib import Path +from typing import TYPE_CHECKING, Any, Dict, Generic, List, Tuple, TypeVar, Union + +import rich + +from ansiblelint.config import options +from ansiblelint.version import __version__ + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + +T = TypeVar("T", bound="BaseFormatter") # type: ignore + + +class BaseFormatter(Generic[T]): + """Formatter of ansible-lint output. + + Base class for output formatters. + + Args: + base_dir (str|Path): reference directory against which display relative path. + display_relative_path (bool): whether to show path as relative or absolute + """ + + def __init__(self, base_dir: str | Path, display_relative_path: bool) -> None: + """Initialize a BaseFormatter instance.""" + if isinstance(base_dir, str): + base_dir = Path(base_dir) + if base_dir: # can be None + base_dir = base_dir.absolute() + + self._base_dir = base_dir if display_relative_path else None + + def _format_path(self, path: str | Path) -> str | Path: + if not self._base_dir or not path: + return path + # Use os.path.relpath 'cause Path.relative_to() misbehaves + return os.path.relpath(path, start=self._base_dir) + + def format(self, match: MatchError) -> str: + """Format a match error.""" + return str(match) + + @staticmethod + def escape(text: str) -> str: + """Escapes a string to avoid processing it as markup.""" + return rich.markup.escape(text) + + +class Formatter(BaseFormatter): # type: ignore + """Default output formatter of ansible-lint.""" + + def format(self, match: MatchError) -> str: + _id = getattr(match.rule, "id", "000") + result = f"[{match.level}][bold][link={match.rule.url}]{self.escape(match.tag)}[/link][/][/][dim]:[/] [{match.level}]{self.escape(match.message)}[/]" + if match.level != "error": + result += f" [dim][{match.level}]({match.level})[/][/]" + if match.ignored: + result += " [dim]# ignored[/]" + result += ( + "\n" + f"[filename]{self._format_path(match.filename or '')}[/]:{match.position}" + ) + if match.details: + result += f" [dim]{self.escape(str(match.details))}[/]" + result += "\n" + return result + + +class QuietFormatter(BaseFormatter[Any]): + """Brief output formatter for ansible-lint.""" + + def format(self, match: MatchError) -> str: + return ( + f"[{match.level}]{match.rule.id}[/] " + f"[filename]{self._format_path(match.filename or '')}[/]:{match.position}" + ) + + +class ParseableFormatter(BaseFormatter[Any]): + """Parseable uses PEP8 compatible format.""" + + def format(self, match: MatchError) -> str: + result = ( + f"[filename]{self._format_path(match.filename or '')}[/][dim]:{match.position}:[/] " + f"[{match.level}][bold]{self.escape(match.tag)}[/bold]" + f"{ f': {match.message}' if not options.quiet else '' }[/]" + ) + if match.level != "error": + result += f" [dim][{match.level}]({match.level})[/][/]" + + return result + + +class AnnotationsFormatter(BaseFormatter): # type: ignore + # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-a-warning-message + """Formatter for emitting violations as GitHub Workflow Commands. + + These commands trigger the GHA Workflow runners platform to post violations + in a form of GitHub Checks API annotations that appear rendered in pull- + request files view. 
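+    Each reported match is rendered as a single workflow command line of one
+    of the following shapes: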
+ + ::debug file={name},line={line},col={col},severity={severity}::{message} + ::warning file={name},line={line},col={col},severity={severity}::{message} + ::error file={name},line={line},col={col},severity={severity}::{message} + + Supported levels: debug, warning, error + """ + + def format(self, match: MatchError) -> str: + """Prepare a match instance for reporting as a GitHub Actions annotation.""" + file_path = self._format_path(match.filename or "") + line_num = match.linenumber + severity = match.rule.severity + violation_details = self.escape(match.message) + if match.column: + col = f",col={match.column}" + else: + col = "" + return ( + f"::{match.level} file={file_path},line={line_num}{col},severity={severity},title={match.tag}" + f"::{violation_details}" + ) + + +class CodeclimateJSONFormatter(BaseFormatter[Any]): + """Formatter for emitting violations in Codeclimate JSON report format. + + The formatter expects a list of MatchError objects and returns a JSON formatted string. + The spec for the codeclimate report can be found here: + https://github.com/codeclimate/platform/blob/master/spec/analyzers/SPEC.md#user-content-data-types + """ + + def format_result(self, matches: list[MatchError]) -> str: + """Format a list of match errors as a JSON string.""" + if not isinstance(matches, list): + raise RuntimeError( + f"The {self.__class__} was expecting a list of MatchError." + ) + + result = [] + for match in matches: + issue: dict[str, Any] = {} + issue["type"] = "issue" + issue["check_name"] = match.tag or match.rule.id # rule-id[subrule-id] + issue["categories"] = match.rule.tags + if match.rule.url: + # https://github.com/codeclimate/platform/issues/68 + issue["url"] = match.rule.url + issue["severity"] = self._remap_severity(match) + # level is not part of CodeClimate specification, but there is + # no other way to expose that info. We recommend switching to + # SARIF format which is better suited for interoperability. + issue["level"] = match.level + issue["description"] = self.escape(str(match.message)) + issue["fingerprint"] = hashlib.sha256( + repr(match).encode("utf-8") + ).hexdigest() + issue["location"] = {} + issue["location"]["path"] = self._format_path(match.filename or "") + if match.column: + issue["location"]["positions"] = {} + issue["location"]["positions"]["begin"] = {} + issue["location"]["positions"]["begin"]["line"] = match.linenumber + issue["location"]["positions"]["begin"]["column"] = match.column + else: + issue["location"]["lines"] = {} + issue["location"]["lines"]["begin"] = match.linenumber + if match.details: + issue["content"] = {} + issue["content"]["body"] = match.details + # Append issue to result list + result.append(issue) + + return json.dumps(result) + + @staticmethod + def _remap_severity(match: MatchError) -> str: + severity = match.rule.severity + + if severity in ["LOW"]: + return "minor" + if severity in ["MEDIUM"]: + return "major" + if severity in ["HIGH"]: + return "critical" + if severity in ["VERY_HIGH"]: + return "blocker" + # VERY_LOW, INFO or anything else + return "info" + + +class SarifFormatter(BaseFormatter[Any]): + """Formatter for emitting violations in SARIF report format. 
+ + The spec of SARIF can be found here: + https://docs.oasis-open.org/sarif/sarif/v2.1.0/ + """ + + BASE_URI_ID = "SRCROOT" + TOOL_NAME = "ansible-lint" + TOOL_URL = "https://github.com/ansible/ansible-lint" + SARIF_SCHEMA_VERSION = "2.1.0" + SARIF_SCHEMA = ( + "https://schemastore.azurewebsites.net/schemas/json/sarif-2.1.0-rtm.5.json" + ) + + def format_result(self, matches: list[MatchError]) -> str: + """Format a list of match errors as a JSON string.""" + if not isinstance(matches, list): + raise RuntimeError( + f"The {self.__class__} was expecting a list of MatchError." + ) + + root_path = Path(str(self._base_dir)).as_uri() + root_path = root_path + "/" if not root_path.endswith("/") else root_path + rules, results = self._extract_results(matches) + + tool = { + "driver": { + "name": self.TOOL_NAME, + "version": __version__, + "informationUri": self.TOOL_URL, + "rules": rules, + } + } + + runs = [ + { + "tool": tool, + "columnKind": "utf16CodeUnits", + "results": results, + "originalUriBaseIds": { + self.BASE_URI_ID: {"uri": root_path}, + }, + } + ] + + report = { + "$schema": self.SARIF_SCHEMA, + "version": self.SARIF_SCHEMA_VERSION, + "runs": runs, + } + + return json.dumps( + report, default=lambda o: o.__dict__, sort_keys=False, indent=2 + ) + + def _extract_results( + self, matches: list[MatchError] + ) -> tuple[list[Any], list[Any]]: + rules = {} + results = [] + for match in matches: + if match.tag not in rules: + rules[match.tag] = self._to_sarif_rule(match) + results.append(self._to_sarif_result(match)) + return list(rules.values()), results + + def _to_sarif_rule(self, match: MatchError) -> dict[str, Any]: + rule: dict[str, Any] = { + "id": match.tag, + "name": match.tag, + "shortDescription": { + "text": str(match.message), + }, + "defaultConfiguration": { + "level": self._to_sarif_level(match), + }, + "help": { + "text": str(match.rule.description), + }, + "helpUri": match.rule.url, + "properties": {"tags": match.rule.tags}, + } + if match.rule.link: + rule["helpUri"] = match.rule.link + return rule + + def _to_sarif_result(self, match: MatchError) -> dict[str, Any]: + result: dict[str, Any] = { + "ruleId": match.tag, + "message": { + "text": str(match.message), + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": self._format_path(match.filename or ""), + "uriBaseId": self.BASE_URI_ID, + }, + "region": { + "startLine": match.linenumber, + }, + }, + }, + ], + } + if match.column: + result["locations"][0]["physicalLocation"]["region"][ + "startColumn" + ] = match.column + return result + + @staticmethod + def _to_sarif_level(match: MatchError) -> str: + # sarif accepts only 4 levels: error, warning, note, none + return match.level diff --git a/src/ansiblelint/generate_docs.py b/src/ansiblelint/generate_docs.py new file mode 100644 index 0000000..2e518d1 --- /dev/null +++ b/src/ansiblelint/generate_docs.py @@ -0,0 +1,176 @@ +"""Utils to generate rules documentation.""" +import logging +from pathlib import Path +from typing import Iterable + +from rich import box +from rich.console import RenderableType + +# Remove this compatibility try-catch block once we drop support for rich < 10.7.0 +try: + from rich.console import group +except ImportError: + from rich.console import render_group as group # type: ignore + +from rich.markdown import Markdown +from rich.table import Table + +from ansiblelint.config import PROFILES +from ansiblelint.constants import RULE_DOC_URL +from ansiblelint.rules import RulesCollection + +DOC_HEADER = """ +# Default 
Rules + +(lint_default_rules)= + +Below you can see the list of default rules Ansible Lint use to evaluate playbooks and roles: + +""" + +_logger = logging.getLogger(__name__) + + +def rules_as_docs(rules: RulesCollection) -> str: + """Dump documentation files for all rules, returns only confirmation message. + + That is internally used for building documentation and the API can change + at any time. + """ + result = "" + dump_path = Path(".") / "docs" / "rules" + if not dump_path.exists(): + raise RuntimeError(f"Failed to find {dump_path} folder for dumping rules.") + + with open(dump_path / ".." / "profiles.md", "w", encoding="utf-8") as f: + f.write(profiles_as_md(header=True, docs_url="rules/")) + + for rule in rules.alphabetical(): + result = "" + with open(dump_path / f"{rule.id}.md", "w", encoding="utf-8") as f: + # because title == rule.id we get the desired labels for free + # and we do not have to insert `(target_header)=` + title = f"{rule.id}" + + if rule.help: + if not rule.help.startswith(f"# {rule.id}"): + raise RuntimeError( + f"Rule {rule.__class__} markdown help does not start with `# {rule.id}` header.\n{rule.help}" + ) + result = result[1:] + result += f"{rule.help}" + else: + description = rule.description + if rule.link: + description += f" [more]({rule.link})" + + result += f"# {title}\n\n**{rule.shortdesc}**\n\n{description}" + result = result.strip() + "\n" + f.write(result) + + return "All markdown files for rules were dumped!" + + +def rules_as_str(rules: RulesCollection) -> RenderableType: + """Return rules as string.""" + table = Table(show_header=False, header_style="title", box=box.SIMPLE) + for rule in rules.alphabetical(): + if rule.tags: + tag = f"[dim] ({', '.join(rule.tags)})[/dim]" + else: + tag = "" + table.add_row( + f"[link={RULE_DOC_URL}{rule.id}/]{rule.id}[/link]", rule.shortdesc + tag + ) + return table + + +def rules_as_md(rules: RulesCollection) -> str: + """Return md documentation for a list of rules.""" + result = DOC_HEADER + + for rule in rules.alphabetical(): + # because title == rule.id we get the desired labels for free + # and we do not have to insert `(target_header)=` + title = f"{rule.id}" + + if rule.help: + if not rule.help.startswith(f"# {rule.id}"): + raise RuntimeError( + f"Rule {rule.__class__} markdown help does not start with `# {rule.id}` header.\n{rule.help}" + ) + result += f"\n\n{rule.help}" + else: + description = rule.description + if rule.link: + description += f" [more]({rule.link})" + + result += f"\n\n## {title}\n\n**{rule.shortdesc}**\n\n{description}" + + return result + + +@group() +def rules_as_rich(rules: RulesCollection) -> Iterable[Table]: + """Print documentation for a list of rules, returns empty string.""" + width = max(16, *[len(rule.id) for rule in rules]) + for rule in rules.alphabetical(): + table = Table(show_header=True, header_style="title", box=box.MINIMAL) + table.add_column(rule.id, style="dim", width=width) + table.add_column(Markdown(rule.shortdesc)) + + description = rule.help or rule.description + if rule.link: + description += f" [(more)]({rule.link})" + table.add_row("description", Markdown(description)) + if rule.version_added: + table.add_row("version_added", rule.version_added) + if rule.tags: + table.add_row("tags", ", ".join(rule.tags)) + if rule.severity: + table.add_row("severity", rule.severity) + yield table + + +def profiles_as_md(header: bool = False, docs_url: str = RULE_DOC_URL) -> str: + """Return markdown representation of supported profiles.""" + result = "" + + if header: + 
result += """<!--- +Do not manually edit, generated from generate_docs.py +--> +# Profiles + +Ansible-lint profiles gradually increase the strictness of rules as your Ansible content lifecycle. + +!!! note + + Rules with `*` in the suffix are not yet implemented but are documented with linked GitHub issues. + +""" + + for name, profile in PROFILES.items(): + extends = "" + if profile.get("extends", None): + extends = ( + f" It extends [{profile['extends']}](#{profile['extends']}) profile." + ) + result += f"## {name}\n\n{profile['description']}{extends}\n" + for rule, rule_data in profile["rules"].items(): + if "[" in rule: + url = f"{docs_url}{rule.split('[')[0]}/" + else: + url = f"{docs_url}{rule}/" + if not rule_data: + result += f"- [{rule}]({url})\n" + else: + result += f"- [{rule}]({rule_data['url']})\n" + + result += "\n" + return result + + +def profiles_as_rich() -> Markdown: + """Return rich representation of supported profiles.""" + return Markdown(profiles_as_md()) diff --git a/src/ansiblelint/loaders.py b/src/ansiblelint/loaders.py new file mode 100644 index 0000000..6108701 --- /dev/null +++ b/src/ansiblelint/loaders.py @@ -0,0 +1,57 @@ +"""Utilities for loading various files.""" +from __future__ import annotations + +import logging +import os +from collections import defaultdict +from functools import partial +from pathlib import Path +from typing import Any + +import yaml +from yaml import YAMLError + +try: + from yaml import CFullLoader as FullLoader + from yaml import CSafeLoader as SafeLoader +except (ImportError, AttributeError): + from yaml import FullLoader, SafeLoader # type: ignore + +IGNORE_TXT = ".ansible-lint-ignore" +yaml_load = partial(yaml.load, Loader=FullLoader) +yaml_load_safe = partial(yaml.load, Loader=SafeLoader) +_logger = logging.getLogger(__name__) + + +def yaml_from_file(filepath: str | Path) -> Any: + """Return a loaded YAML file.""" + with open(str(filepath), encoding="utf-8") as content: + return yaml_load(content) + + +def load_ignore_txt(filepath: str | Path = IGNORE_TXT) -> dict[str, set[str]]: + """Return a list of rules to ignore.""" + result = defaultdict(set) + if os.path.isfile(filepath): + with open(str(filepath), encoding="utf-8") as content: + _logger.debug("Loading ignores from %s", filepath) + for line in content: + entry = line.split("#")[0].rstrip() + if entry: + try: + path, rule = entry.split() + except ValueError as exc: + raise RuntimeError( + f"Unable to parse line '{line}' from {filepath} file." 
+ ) from exc + result[path].add(rule) + return result + + +__all__ = [ + "load_ignore_txt", + "yaml_from_file", + "yaml_load", + "yaml_load_safe", + "YAMLError", +] diff --git a/src/ansiblelint/logger.py b/src/ansiblelint/logger.py new file mode 100644 index 0000000..18b36f4 --- /dev/null +++ b/src/ansiblelint/logger.py @@ -0,0 +1,18 @@ +"""Utils related to logging.""" +import logging +import time +from contextlib import contextmanager +from typing import Any, Iterator + +_logger = logging.getLogger(__name__) + + +@contextmanager +def timed_info(msg: Any, *args: Any) -> Iterator[None]: + """Context manager for logging slow operations, mentions duration.""" + start = time.time() + try: + yield + finally: + elapsed = time.time() - start + _logger.info(msg + " (%.2fs)", *(*args, elapsed)) diff --git a/src/ansiblelint/py.typed b/src/ansiblelint/py.typed new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/src/ansiblelint/py.typed diff --git a/src/ansiblelint/rules/__init__.py b/src/ansiblelint/rules/__init__.py new file mode 100644 index 0000000..5c87165 --- /dev/null +++ b/src/ansiblelint/rules/__init__.py @@ -0,0 +1,549 @@ +"""All internal ansible-lint rules.""" +from __future__ import annotations + +import copy +import inspect +import logging +import re +import sys +from argparse import Namespace +from collections import defaultdict +from functools import lru_cache +from importlib import import_module +from pathlib import Path +from typing import Any, Iterable, Iterator, MutableMapping, MutableSequence, cast + +from ruamel.yaml.comments import CommentedMap, CommentedSeq + +import ansiblelint.skip_utils +import ansiblelint.utils +import ansiblelint.yaml_utils +from ansiblelint._internal.rules import ( + AnsibleParserErrorRule, + BaseRule, + LoadingFailureRule, + RuntimeErrorRule, + WarningRule, +) +from ansiblelint.config import PROFILES, get_rule_config +from ansiblelint.config import options as default_options +from ansiblelint.constants import LINE_NUMBER_KEY, RULE_DOC_URL, SKIPPED_RULES_KEY +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable, expand_paths_vars + +_logger = logging.getLogger(__name__) + +match_types = { + "matchlines": "line", + "match": "line", # called by matchlines + "matchtasks": "task", + "matchtask": "task", # called by matchtasks + "matchyaml": "yaml", + "matchplay": "play", # called by matchyaml + "matchdir": "dir", +} + + +class AnsibleLintRule(BaseRule): + """AnsibleLintRule should be used as base for writing new rules.""" + + @property + def url(self) -> str: + """Return rule documentation url.""" + return RULE_DOC_URL + self.id + "/" + + @property + def rule_config(self) -> dict[str, Any]: + """Retrieve rule specific configuration.""" + return get_rule_config(self.id) + + @lru_cache(maxsize=256) + def get_config(self, key: str) -> Any: + """Return a configured value for given key string.""" + return self.rule_config.get(key, None) + + @staticmethod + def unjinja(text: str) -> str: + """Remove jinja2 bits from a string.""" + text = re.sub(r"{{.+?}}", "JINJA_EXPRESSION", text) + text = re.sub(r"{%.+?%}", "JINJA_STATEMENT", text) + text = re.sub(r"{#.+?#}", "JINJA_COMMENT", text) + return text + + # pylint: disable=too-many-arguments + def create_matcherror( + self, + message: str | None = None, + linenumber: int = 1, + details: str = "", + filename: Lintable | None = None, + tag: str = "", + ) -> MatchError: + """Instantiate a new MatchError.""" + match = MatchError( + message=message, + linenumber=linenumber, + 
details=details, + filename=filename, + rule=copy.copy(self), + ) + if tag: + match.tag = tag + # search through callers to find one of the match* methods + frame = inspect.currentframe() + match_type: str | None = None + while not match_type and frame is not None: + func_name = frame.f_code.co_name + match_type = match_types.get(func_name, None) + if match_type: + # add the match_type to the match + match.match_type = match_type + break + frame = frame.f_back # get the parent frame for the next iteration + return match + + @staticmethod + def _enrich_matcherror_with_task_details( + match: MatchError, task: dict[str, Any] + ) -> None: + match.task = task + if not match.details: + match.details = "Task/Handler: " + ansiblelint.utils.task_to_str(task) + if match.linenumber < task[LINE_NUMBER_KEY]: + match.linenumber = task[LINE_NUMBER_KEY] + + def matchlines(self, file: Lintable) -> list[MatchError]: + matches: list[MatchError] = [] + # arrays are 0-based, line numbers are 1-based + # so use prev_line_no as the counter + for prev_line_no, line in enumerate(file.content.split("\n")): + if line.lstrip().startswith("#"): + continue + + rule_id_list = ansiblelint.skip_utils.get_rule_skips_from_line(line) + if self.id in rule_id_list: + continue + + result = self.match(line) + if not result: + continue + message = None + if isinstance(result, str): + message = result + matcherror = self.create_matcherror( + message=message, + linenumber=prev_line_no + 1, + details=line, + filename=file, + ) + matches.append(matcherror) + return matches + + # pylint: disable=too-many-branches + def matchtasks(self, file: Lintable) -> list[MatchError]: # noqa: C901 + """Call matchtask for each task inside file and return aggregate results. + + Most rules will never need to override matchtasks because its main + purpose is to call matchtask for each task/handlers in the same file, + and to aggregate the results. 
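+
+        Individual rules normally override ``matchtask`` instead, for example
+        (an illustrative sketch, not an actual bundled rule):
+
+        .. code:: python
+
+            def matchtask(self, task, file=None):
+                # Flag any task that uses the command module.
+                return task["action"]["__ansible_module__"] == "command"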
+ """ + matches: list[MatchError] = [] + if ( + file.kind not in ["handlers", "tasks", "playbook"] + or str(file.base_kind) != "text/yaml" + ): + return matches + + tasks_iterator = ansiblelint.yaml_utils.iter_tasks_in_file(file) + for raw_task, task, skipped_tags, error in tasks_iterator: + if error is not None: + # normalize_task converts AnsibleParserError to MatchError + return [error] + + if ( + self.id in skipped_tags + or ("action" not in task) + or "skip_ansible_lint" in task.get("tags", []) + ): + continue + + if self.needs_raw_task: + task["__raw_task__"] = raw_task + + result = self.matchtask(task, file=file) + if not result: + continue + + if isinstance(result, Iterable) and not isinstance( + result, str + ): # list[MatchError] + # https://github.com/PyCQA/pylint/issues/6044 + # pylint: disable=not-an-iterable + for match in result: + if match.tag in skipped_tags: + continue + self._enrich_matcherror_with_task_details(match, task) + matches.append(match) + continue + if isinstance(result, MatchError): + if result.tag in skipped_tags: + continue + match = result + else: # bool or string + message = None + if isinstance(result, str): + message = result + match = self.create_matcherror( + message=message, + linenumber=task[LINE_NUMBER_KEY], + filename=file, + ) + + self._enrich_matcherror_with_task_details(match, task) + matches.append(match) + return matches + + def matchyaml(self, file: Lintable) -> list[MatchError]: + matches: list[MatchError] = [] + if str(file.base_kind) != "text/yaml": + return matches + + yaml = file.data + # yaml returned can be an AnsibleUnicode (a string) when the yaml + # file contains a single string. YAML spec allows this but we consider + # this an fatal error. + if isinstance(yaml, str): + if yaml.startswith("$ANSIBLE_VAULT"): + return [] + return [MatchError(filename=file, rule=LoadingFailureRule())] + if not yaml: + return matches + + if isinstance(yaml, dict): + yaml = [yaml] + + for play in yaml: + # Bug #849 + if play is None: + continue + + if self.id in play.get(SKIPPED_RULES_KEY, ()): + continue + + if "skip_ansible_lint" in play.get("tags", []): + continue + + matches.extend(self.matchplay(file, play)) + + return matches + + +class TransformMixin: + """A mixin for AnsibleLintRule to enable transforming files. + + If ansible-lint is started with the ``--write`` option, then the ``Transformer`` + will call the ``transform()`` method for every MatchError identified if the rule + that identified it subclasses this ``TransformMixin``. Only the rule that identified + a MatchError can do transforms to fix that match. + """ + + def transform( + self, + match: MatchError, + lintable: Lintable, + data: CommentedMap | CommentedSeq | str, + ) -> None: + """Transform ``data`` to try to fix the MatchError identified by this rule. + + The ``match`` was generated by this rule in the ``lintable`` file. + When ``transform()`` is called on a rule, the rule should either fix the + issue, if possible, or make modifications that make it easier to fix manually. + + The transform must set ``match.fixed = True`` when data has been transformed to + fix the error. + + For YAML files, ``data`` is an editable YAML dict/array that preserves + any comments that were in the original file. + + .. code:: python + + data[0]["tasks"][0]["when"] = False + + This is easier with the ``seek()`` utility method: + + .. 
code :: python + + target_task = self.seek(match.yaml_path, data) + target_task["when"] = False + + For any files that aren't YAML, ``data`` is the loaded file's content as a string. + To edit non-YAML files, save the updated contents in ``lintable.content``: + + .. code:: python + + new_data = self.do_something_to_fix_the_match(data) + lintable.content = new_data + """ + + @staticmethod + def seek( + yaml_path: list[int | str], + data: MutableMapping[str, Any] | MutableSequence[Any] | str, + ) -> Any: + """Get the element identified by ``yaml_path`` in ``data``. + + Rules that work with YAML need to seek, or descend, into nested YAML data + structures to perform the relevant transforms. For example: + + .. code:: python + + def transform(self, match, lintable, data): + target_task = self.seek(match.yaml_path, data) + # transform target_task + """ + if isinstance(data, str): + # can't descend into a string + return data + target = data + for segment in yaml_path: + # The cast() calls tell mypy what types we expect. + # Essentially this does: + # target = target[segment] + if isinstance(segment, str): + target = cast(MutableMapping[str, Any], target)[segment] + elif isinstance(segment, int): + target = cast(MutableSequence[Any], target)[segment] + return target + + +# pylint: disable=too-many-nested-blocks +def load_plugins( # noqa: max-complexity: 11 + dirs: list[str], +) -> Iterator[AnsibleLintRule]: + """Yield a rule class.""" + + def all_subclasses(cls: type) -> set[type]: + return set(cls.__subclasses__()).union( + [s for c in cls.__subclasses__() for s in all_subclasses(c)] + ) + + orig_sys_path = sys.path.copy() + + for directory in dirs: + if directory not in sys.path: + sys.path.append(str(directory)) + + # load all modules in the directory + for f in Path(directory).glob("*.py"): + if "__" not in f.stem and f.stem not in "conftest": + import_module(f"{f.stem}") + # restore sys.path + sys.path = orig_sys_path + + rules: dict[str, BaseRule] = {} + for rule in all_subclasses(BaseRule): + # we do not return the rules that are not loaded from passed 'directory' + # or rules that do not have a valid id. For example, during testing + # python may load other rule classes, some outside the tested rule + # directories. + if getattr(rule, "id") and Path(inspect.getfile(rule)).parent.absolute() in [ + Path(x).absolute() for x in dirs + ]: + if issubclass(rule, BaseRule) and rule.id not in rules: + rules[rule.id] = rule() + for rule in rules.values(): # type: ignore + if isinstance(rule, AnsibleLintRule) and bool(rule.id): + yield rule + + +class RulesCollection: + """Container for a collection of rules.""" + + def __init__( + self, + rulesdirs: list[str] | None = None, + options: Namespace = default_options, + profile_name: str | None = None, + conditional: bool = True, + ) -> None: + """Initialize a RulesCollection instance.""" + self.options = options + self.profile = [] + if profile_name: + self.profile = PROFILES[profile_name] + if rulesdirs is None: + rulesdirs = [] + self.rulesdirs = expand_paths_vars(rulesdirs) + self.rules: list[BaseRule] = [] + # internal rules included in order to expose them for docs as they are + # not directly loaded by our rule loader. 
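+        # (they describe linter-level problems such as runtime errors, parser
+        # errors and files that failed to load)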
+ self.rules.extend( + [ + RuntimeErrorRule(), + AnsibleParserErrorRule(), + LoadingFailureRule(), + WarningRule(), + ] + ) + for rule in load_plugins(rulesdirs): + self.register(rule, conditional=conditional) + self.rules = sorted(self.rules) + + # When we have a profile we unload some of the rules + # But we do include all rules when listing all rules or tags + if profile_name and not (self.options.list_rules or self.options.list_tags): + filter_rules_with_profile(self.rules, profile_name) + + def register(self, obj: AnsibleLintRule, conditional: bool = False) -> None: + """Register a rule.""" + # We skip opt-in rules which were not manually enabled. + # But we do include opt-in rules when listing all rules or tags + if any( + [ + not conditional, + self.profile, # when profile is used we load all rules and filter later + "opt-in" not in obj.tags, + obj.id in self.options.enable_list, + self.options.list_rules, + self.options.list_tags, + ] + ): + obj._collection = self # pylint: disable=protected-access + self.rules.append(obj) + + def __iter__(self) -> Iterator[BaseRule]: + """Return the iterator over the rules in the RulesCollection.""" + return iter(sorted(self.rules)) + + def alphabetical(self) -> Iterator[BaseRule]: + """Return an iterator over the rules in the RulesCollection in alphabetical order.""" + return iter(sorted(self.rules, key=lambda x: x.id)) + + def __len__(self) -> int: + """Return the length of the RulesCollection data.""" + return len(self.rules) + + def extend(self, more: list[AnsibleLintRule]) -> None: + """Combine rules.""" + self.rules.extend(more) + + def run( # noqa: max-complexity: 12 + self, + file: Lintable, + tags: set[str] | None = None, + skip_list: list[str] | None = None, + ) -> list[MatchError]: + """Run all the rules against the given lintable.""" + matches: list[MatchError] = [] + if tags is None: + tags = set() + if skip_list is None: + skip_list = [] + + if not file.path.is_dir(): + try: + if file.content is not None: # loads the file content + pass + except (OSError, UnicodeDecodeError) as exc: + return [ + MatchError( + message=str(exc), + filename=file, + rule=LoadingFailureRule(), + tag=f"{LoadingFailureRule.id}[{exc.__class__.__name__.lower()}]", + ) + ] + + for rule in self.rules: + if rule.id == "syntax-check": + continue + if ( + not tags + or rule.has_dynamic_tags + or not set(rule.tags).union([rule.id]).isdisjoint(tags) + ): + rule_definition = set(rule.tags) + rule_definition.add(rule.id) + if set(rule_definition).isdisjoint(skip_list): + matches.extend(rule.getmatches(file)) + + # some rules can produce matches with tags that are inside our + # skip_list, so we need to cleanse the matches + matches = [m for m in matches if m.tag not in skip_list] + + return matches + + def __repr__(self) -> str: + """Return a RulesCollection instance representation.""" + return "\n".join( + [rule.verbose() for rule in sorted(self.rules, key=lambda x: x.id)] + ) + + def list_tags(self) -> str: + """Return a string with all the tags in the RulesCollection.""" + tag_desc = { + "command-shell": "Specific to use of command and shell modules", + "core": "Related to internal implementation of the linter", + "deprecations": "Indicate use of features that are removed from Ansible", + "experimental": "Newly introduced rules, by default triggering only warnings", + "formatting": "Related to code-style", + "idempotency": "Possible indication that consequent runs would produce different results", + "idiom": "Anti-pattern detected, likely to cause undesired 
behavior", + "metadata": "Invalid metadata, likely related to galaxy, collections or roles", + "opt-in": "Rules that are not used unless manually added to `enable_list`", + "security": "Rules related o potentially security issues, like exposing credentials", + "unpredictability": "Warn about code that might not work in a predictable way", + "unskippable": "Indicate a fatal error that cannot be ignored or disabled", + "yaml": "External linter which will also produce its own rule codes", + } + + tags = defaultdict(list) + for rule in self.rules: + for tag in rule.tags: + tags[tag].append(rule.id) + result = "# List of tags and rules they cover\n" + for tag in sorted(tags): + desc = tag_desc.get(tag, None) + if desc: + result += f"{tag}: # {desc}\n" + else: + result += f"{tag}:\n" + # result += f" rules:\n" + for name in tags[tag]: + result += f" - {name}\n" + return result + + +def filter_rules_with_profile(rule_col: list[BaseRule], profile: str) -> None: + """Unload rules that are not part of the specified profile.""" + included = set() + extends = profile + total_rules = len(rule_col) + while extends: + for rule in PROFILES[extends]["rules"]: + _logger.debug("Activating rule `%s` due to profile `%s`", rule, extends) + included.add(rule) + extends = PROFILES[extends].get("extends", None) + for rule in rule_col.copy(): + if rule.id not in included: + _logger.debug( + "Unloading %s rule due to not being part of %s profile.", + rule.id, + profile, + ) + rule_col.remove(rule) + else: + for tag in ("opt-in", "experimental"): + if tag in rule.tags: + _logger.debug( + "Removing tag `%s` from `%s` rule because `%s` profile makes it mandatory.", + tag, + rule.id, + profile, + ) + rule.tags.remove(tag) + # rule_col.rules.remove(rule) + # break + if "opt-in" in rule.tags: + rule.tags.remove("opt-in") + _logger.debug("%s/%s rules included in the profile", len(rule_col), total_rules) diff --git a/src/ansiblelint/rules/args.md b/src/ansiblelint/rules/args.md new file mode 100644 index 0000000..567d0fd --- /dev/null +++ b/src/ansiblelint/rules/args.md @@ -0,0 +1,91 @@ +# args + +This rule validates if the task arguments conform with the plugin documentation. + +The rule validation will check if the option name is valid and has the correct +value along with conditionals on the options like `mutually_exclusive`, +`required_together`, `required_one_of` and so on. + +For more information see the +[argument spec validator](https://docs.ansible.com/ansible/latest/reference_appendices/module_utils.html#argumentspecvalidator) +topic in the Ansible module utility documentation. + +Possible messages: + +- `args[module]` - missing required arguments: ... +- `args[module]` - missing parameter(s) required by ... + +## Problematic Code + +```yaml +--- +- name: Fixture to validate module options failure scenarios + hosts: localhost + tasks: + - name: Clone content repository + ansible.builtin.git: # <- Required option `repo` is missing. + dest: /home/www + accept_hostkey: true + version: master + update: false + + - name: Enable service httpd and ensure it is not masked + ansible.builtin.systemd: # <- Missing 'name' parameter required by 'enabled'. + enabled: true + masked: false + + - name: Use quiet to avoid verbose output + ansible.builtin.assert: + test: + - my_param <= 100 + - my_param >= 0 + quiet: invalid # <- Value for option `quiet` is invalid. 
+``` + +## Correct Code + +```yaml +--- +- name: Fixture to validate module options pass scenario + hosts: localhost + tasks: + - name: Clone content repository + ansible.builtin.git: # <- Contains required option `repo`. + repo: https://github.com/ansible/ansible-examples + dest: /home/www + accept_hostkey: true + version: master + update: false + + - name: Enable service httpd and ensure it is not masked + ansible.builtin.systemd: # <- Contains 'name' parameter required by 'enabled'. + name: httpd + enabled: false + masked: false + + - name: Use quiet to avoid verbose output + ansible.builtin.assert: + that: + - my_param <= 100 + - my_param >= 0 + quiet: True # <- Has correct type value for option `quiet` which is boolean. +``` + +## Special cases + +In some complex cases where you are using jinja expressions, the linter may not +able to fully validate all the possible values and report a false positive. The +example below would usually report +`parameters are mutually exclusive: data|file|keyserver|url` but because we +added `# noqa: args[module]` it will just pass. + +```yaml +- name: Add apt keys # noqa: args[module] + become: true + ansible.builtin.apt_key: + url: "{{ zj_item['url'] | default(omit) }}" + data: "{{ zj_item['data'] | default(omit) }}" + loop: "{{ repositories_keys }}" + loop_control: + loop_var: zj_item +``` diff --git a/src/ansiblelint/rules/args.py b/src/ansiblelint/rules/args.py new file mode 100644 index 0000000..0ff532e --- /dev/null +++ b/src/ansiblelint/rules/args.py @@ -0,0 +1,280 @@ +"""Rule definition to validate task options.""" +from __future__ import annotations + +import contextlib +import importlib.util +import io +import json +import logging +import re +import sys +from functools import lru_cache +from typing import Any + +# pylint: disable=preferred-module +from unittest import mock +from unittest.mock import patch + +# pylint: disable=reimported +import ansible.module_utils.basic as mock_ansible_module +from ansible.module_utils import basic +from ansible.plugins import loader + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule, RulesCollection +from ansiblelint.text import has_jinja +from ansiblelint.yaml_utils import clean_json + +_logger = logging.getLogger(__name__) + + +@lru_cache +def load_module(module_name: str) -> loader.PluginLoadContext: + """Load plugin from module name and cache it.""" + return loader.module_loader.find_plugin_with_context(module_name) + + +class ValidationPassed(Exception): + """Exception to be raised when validation passes.""" + + +class CustomAnsibleModule(basic.AnsibleModule): # type: ignore + """Mock AnsibleModule class.""" + + def __init__(self, *args: str, **kwargs: str) -> None: + """Initialize AnsibleModule mock.""" + super().__init__(*args, **kwargs) + raise ValidationPassed + + +class ArgsRule(AnsibleLintRule): + """Validating module arguments.""" + + id = "args" + severity = "HIGH" + description = "Check whether tasks are using correct module options." 
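+    # Validation works by importing the resolved module file and executing its
+    # main() with a mocked AnsibleModule and sys.argv, then parsing any failure
+    # JSON captured from stdout (see matchtask below).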
+ tags = ["syntax", "experimental"] + version_added = "v6.10.0" + module_aliases: dict[str, str] = {"block/always/rescue": "block/always/rescue"} + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + # pylint: disable=too-many-branches,too-many-locals + results: list[MatchError] = [] + module_name = task["action"]["__ansible_module_original__"] + failed_msg = None + + if module_name in self.module_aliases: + return [] + + loaded_module = load_module(module_name) + module_args = { + key: value + for key, value in task["action"].items() + if not key.startswith("__") + } + # https://github.com/ansible/ansible-lint/issues/2824 + if loaded_module.resolved_fqcn == "ansible.builtin.async_status": + module_args["_async_dir"] = "/tmp/ansible-async" + if loaded_module.resolved_fqcn == "ansible.builtin.service": + _logger.debug( + "Skipped service module validation as not being supported yet." + ) + return [] + + with mock.patch.object( + mock_ansible_module, "AnsibleModule", CustomAnsibleModule + ): + spec = importlib.util.spec_from_file_location( + name=loaded_module.resolved_fqcn, + location=loaded_module.plugin_resolved_path, + ) + if spec: + assert spec.loader is not None + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + else: + assert file is not None + _logger.warning( + "Unable to load module %s at %s:%s for options validation", + module_name, + file.filename, + task[LINE_NUMBER_KEY], + ) + return [] + + try: + if not hasattr(module, "main"): + # skip validation for module options that are implemented as action plugin + # as the option values can be changed in action plugin and are not passed + # through `ArgumentSpecValidator` class as in case of modules. + return [] + + with patch.object( + sys, + "argv", + ["", json.dumps({"ANSIBLE_MODULE_ARGS": clean_json(module_args)})], + ): + fio = io.StringIO() + failed_msg = "" + # Warning: avoid running anything while stdout is redirected + # as what happens may be very hard to debug. + with contextlib.redirect_stdout(fio): + # pylint: disable=protected-access + basic._ANSIBLE_ARGS = None + try: + module.main() + except SystemExit: + failed_msg = fio.getvalue() + if failed_msg: + results.extend( + self._parse_failed_msg(failed_msg, task, module_name, file) + ) + + sanitized_results = self._sanitize_results(results, module_name) + return sanitized_results + except ValidationPassed: + return [] + + def _sanitize_results( + self, results: list[MatchError], module_name: str + ) -> list[MatchError]: + """Remove results that are false positive.""" + sanitized_results = [] + for result in results: + result_msg = result.message + if result_msg.startswith("Unsupported parameters"): + # cmd option is a special case in command module and after option validation is done. + if ( + "Unsupported parameters for (basic.py) module" in result_msg + and module_name + in ["command", "ansible.builtin.command", "ansible.legacy.command"] + ): + continue + result.message = result_msg.replace("(basic.py)", f"{module_name}") + elif result_msg.startswith("missing required arguments"): + if ( + "missing required arguments: free_form" in result_msg + and module_name + in [ + "raw", + "ansible.builtin.raw", + "ansible.legacy.raw", + "meta", + "ansible.builtin.meta", + "ansible.legacy.meta", + ] + ): + # free_form option is a special case in raw module hence ignore this error. 
+ continue + if ( + "missing required arguments: key_value" in result_msg + and module_name + in [ + "set_fact", + "ansible.builtin.set_fact", + "ansible.legacy.set_fact", + ] + ): + # handle special case for set_fact module with key and value + continue + if "Supported parameters include" in result_msg and module_name in [ + "set_fact", + "ansible.builtin.set_fact", + "ansible.legacy.set_fact", + ]: + continue + sanitized_results.append(result) + + return sanitized_results + + def _parse_failed_msg( + self, + failed_msg: str, + task: dict[str, Any], + module_name: str, + file: Lintable | None = None, + ) -> list[MatchError]: + """Parse failed message and return list of MatchError.""" + results: list[MatchError] = [] + try: + failed_obj = json.loads(failed_msg) + error_message = failed_obj["msg"] + except json.decoder.JSONDecodeError: + error_message = failed_msg + + option_type_check_error = re.search( + r"argument '(?P<name>.*)' is of type", error_message + ) + if option_type_check_error: + # ignore options with templated variable value with type check errors + option_key = option_type_check_error.group("name") + option_value = task["action"][option_key] + if has_jinja(option_value): + _logger.debug( + "Type checking ignored for '%s' option in task '%s' at line %s.", + option_key, + module_name, + task[LINE_NUMBER_KEY], + ) + return results + + value_not_in_choices_error = re.search( + r"value of (?P<name>.*) must be one of:", error_message + ) + if value_not_in_choices_error: + # ignore templated value not in allowed choices + choice_key = value_not_in_choices_error.group("name") + choice_value = task["action"][choice_key] + if has_jinja(choice_value): + _logger.debug( + "Value checking ignored for '%s' option in task '%s' at line %s.", + choice_key, + module_name, + task[LINE_NUMBER_KEY], + ) + return results + + results.append( + self.create_matcherror( + message=error_message, + linenumber=task[LINE_NUMBER_KEY], + tag="args[module]", + filename=file, + ) + ) + return results + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + def test_args_module_fail() -> None: + """Test rule invalid module options.""" + collection = RulesCollection() + collection.register(ArgsRule()) + success = "examples/playbooks/rule-args-module-fail-1.yml" + results = Runner(success, rules=collection).run() + assert len(results) == 5 + assert results[0].tag == "args[module]" + assert "missing required arguments" in results[0].message + assert results[1].tag == "args[module]" + assert "missing parameter(s) required by " in results[1].message + assert results[2].tag == "args[module]" + assert "Unsupported parameters for" in results[2].message + assert results[3].tag == "args[module]" + assert "Unsupported parameters for" in results[2].message + assert results[4].tag == "args[module]" + assert "value of state must be one of" in results[4].message + + def test_args_module_pass() -> None: + """Test rule valid module options.""" + collection = RulesCollection() + collection.register(ArgsRule()) + success = "examples/playbooks/rule-args-module-pass-1.yml" + results = Runner(success, rules=collection).run() + assert len(results) == 0, results diff --git a/src/ansiblelint/rules/avoid_implicit.md b/src/ansiblelint/rules/avoid_implicit.md new file mode 100644 index 0000000..4c3d781 --- /dev/null +++ b/src/ansiblelint/rules/avoid_implicit.md @@ -0,0 +1,37 @@ +# avoid-implicit + +This 
rule identifies the use of dangerous implicit behaviors, often also +undocumented. + +This rule will produce the following type of error messages: + +- `avoid-implicit[copy-content]` is not a string as [copy](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/copy_module.html#synopsis) + modules also accept these, but without documenting them. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Write file content + ansible.builtin.copy: + content: { "foo": "bar" } # <-- should use explicit jinja template + dest: /tmp/foo.txt +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Write file content + vars: + content: { "foo": "bar" } + ansible.builtin.copy: + content: "{{ content | to_json }}" # explicit better than implicit! + dest: /tmp/foo.txt +``` diff --git a/src/ansiblelint/rules/avoid_implicit.py b/src/ansiblelint/rules/avoid_implicit.py new file mode 100644 index 0000000..dbd44d6 --- /dev/null +++ b/src/ansiblelint/rules/avoid_implicit.py @@ -0,0 +1,58 @@ +"""Implementation of avoid-implicit rule.""" +# https://github.com/ansible/ansible-lint/issues/2501 +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class AvoidImplicitRule(AnsibleLintRule): + """Rule that identifies use of undocumented or discouraged implicit behaviors.""" + + id = "avoid-implicit" + shortdesc = "Avoid implicit behaviors" + description = ( + "Items which are templated should use ``template`` instead of " + "``copy`` with ``content`` to ensure correctness." + ) + severity = "MEDIUM" + tags = ["unpredictability"] + version_added = "v6.8.0" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + """Confirm if current rule is matching a specific task.""" + if task["action"]["__ansible_module__"] == "copy": + content = task["action"].get("content", "") + if not isinstance(content, str): + return True + return False + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + def test_template_instead_of_copy_positive() -> None: + """Positive test for avoid-implicit.""" + collection = RulesCollection() + collection.register(AvoidImplicitRule()) + success = "examples/playbooks/rule-avoid-implicit-pass.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + def test_template_instead_of_copy_negative() -> None: + """Negative test for avoid-implicit.""" + collection = RulesCollection() + collection.register(AvoidImplicitRule()) + failure = "examples/playbooks/rule-avoid-implicit-fail.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 1 diff --git a/src/ansiblelint/rules/command_instead_of_module.md b/src/ansiblelint/rules/command_instead_of_module.md new file mode 100644 index 0000000..a4e69b0 --- /dev/null +++ b/src/ansiblelint/rules/command_instead_of_module.md @@ -0,0 +1,35 @@ +# command-instead-of-module + +This rule will recommend you to use a specific ansible module instead for tasks +that are better served by a module, as these are more reliable, provide better +messaging and usually have additional 
features like the ability to retry. + +In the unlikely case that the rule triggers false positives, you can disable it +by adding a comment like `# noqa: command-instead-of-module` to the same line. + +You can check the [source](https://github.com/ansible/ansible-lint/blob/main/src/ansiblelint/rules/command_instead_of_module.py) +of the rule for all the known commands that trigger the rule and their allowed +list arguments of exceptions and raise a pull request to improve them. + +## Problematic Code + +```yaml +--- +- name: Update apt cache + hosts: all + tasks: + - name: Run apt-get update + ansible.builtin.command: apt-get update # <-- better to use ansible.builtin.apt module +``` + +## Correct Code + +```yaml +--- +- name: Update apt cache + hosts: all + tasks: + - name: Run apt-get update + ansible.builtin.apt: + update_cache: true +``` diff --git a/src/ansiblelint/rules/command_instead_of_module.py b/src/ansiblelint/rules/command_instead_of_module.py new file mode 100644 index 0000000..b55775f --- /dev/null +++ b/src/ansiblelint/rules/command_instead_of_module.py @@ -0,0 +1,262 @@ +"""Implementation of command-instead-of-module rule.""" +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+from __future__ import annotations + +import os +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.utils import convert_to_boolean, get_first_cmd_arg, get_second_cmd_arg + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class CommandsInsteadOfModulesRule(AnsibleLintRule): + """Using command rather than module.""" + + id = "command-instead-of-module" + description = ( + "Executing a command when there is an Ansible module is generally a bad idea" + ) + severity = "HIGH" + tags = ["command-shell", "idiom"] + version_added = "historic" + + _commands = ["command", "shell"] + _modules = { + "apt-get": "apt-get", + "chkconfig": "service", + "curl": "get_url or uri", + "git": "git", + "hg": "hg", + "letsencrypt": "acme_certificate", + "mktemp": "tempfile", + "mount": "mount", + "patch": "patch", + "rpm": "yum or rpm_key", + "rsync": "synchronize", + "sed": "template, replace or lineinfile", + "service": "service", + "supervisorctl": "supervisorctl", + "svn": "subversion", + "systemctl": "systemd", + "tar": "unarchive", + "unzip": "unarchive", + "wget": "get_url or uri", + "yum": "yum", + } + + _executable_options = { + "git": ["branch", "log", "lfs"], + "systemctl": ["--version", "kill", "set-default", "show-environment", "status"], + "yum": ["clean"], + "rpm": ["--nodeps"], + } + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["action"]["__ansible_module__"] not in self._commands: + return False + + first_cmd_arg = get_first_cmd_arg(task) + second_cmd_arg = get_second_cmd_arg(task) + + if not first_cmd_arg: + return False + + executable = os.path.basename(first_cmd_arg) + + if ( + second_cmd_arg + and executable in self._executable_options + and second_cmd_arg in self._executable_options[executable] + ): + return False + + if executable in self._modules and convert_to_boolean( + task["action"].get("warn", True) + ): + message = "{0} used in place of {1} module" + return message.format(executable, self._modules[executable]) + return False + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + APT_GET = """ +- hosts: all + tasks: + - name: Run apt-get update + command: apt-get update +""" + + GIT_COMMANDS_OK = """ +- hosts: all + tasks: + - name: Print current git branch + command: git branch + - name: Print git log + command: git log + - name: Install git lfs support + command: git lfs install +""" + + RESTART_SSHD = """ +- hosts: all + tasks: + - name: Restart sshd + command: systemctl restart sshd +""" + + SYSTEMCTL_STATUS = """ +- hosts: all + tasks: + - name: Show systemctl service status + command: systemctl status systemd-timesyncd +""" + + SYSTEMD_ENVIRONMENT = """ +- hosts: all + tasks: + - name: Show systemd environment + command: systemctl show-environment +""" + + SYSTEMD_RUNLEVEL = """ +- hosts: all + tasks: + - name: Set systemd runlevel + command: systemctl set-default multi-user.target +""" + + SYSTEMD_KILL = """ +- hosts: all + tasks: + - name: Kill service using SIGUSR1 + command: systemctl kill --signal=SIGUSR1 sshd +""" + + YUM_UPDATE = """ +- hosts: all + tasks: + - name: Run yum update + command: yum update +""" + + YUM_CLEAN = """ +- hosts: all + tasks: + - name: Clear yum cache + command: yum clean all +""" + + NO_COMMAND = """ +- hosts: all + tasks: + - name: Clear yum cache + command: "" +""" + + @pytest.mark.parametrize( + 
"rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_apt_get(rule_runner: RunFromText) -> None: + """The apt module supports update.""" + results = rule_runner.run_playbook(APT_GET) + assert len(results) == 1 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_restart_sshd(rule_runner: RunFromText) -> None: + """Restarting services is supported by the systemd module.""" + results = rule_runner.run_playbook(RESTART_SSHD) + assert len(results) == 1 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_git_commands_ok(rule_runner: RunFromText) -> None: + """Check the git commands not supported by the git module do not trigger rule.""" + results = rule_runner.run_playbook(GIT_COMMANDS_OK) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_systemd_status(rule_runner: RunFromText) -> None: + """Set-default is not supported by the systemd module.""" + results = rule_runner.run_playbook(SYSTEMCTL_STATUS) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_systemd_environment(rule_runner: RunFromText) -> None: + """Showing the environment is not supported by the systemd module.""" + results = rule_runner.run_playbook(SYSTEMD_ENVIRONMENT) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_systemd_runlevel(rule_runner: RunFromText) -> None: + """Set-default is not supported by the systemd module.""" + results = rule_runner.run_playbook(SYSTEMD_RUNLEVEL) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_systemd_kill(rule_runner: RunFromText) -> None: + """Kill is not supported by the systemd module.""" + results = rule_runner.run_playbook(SYSTEMD_KILL) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_yum_update(rule_runner: RunFromText) -> None: + """Using yum update should fail.""" + results = rule_runner.run_playbook(YUM_UPDATE) + assert len(results) == 1 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_yum_clean(rule_runner: RunFromText) -> None: + """The yum module does not support clearing yum cache.""" + results = rule_runner.run_playbook(YUM_CLEAN) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (CommandsInsteadOfModulesRule,), indirect=["rule_runner"] + ) + def test_no_command(rule_runner: RunFromText) -> None: + """If no command is passed it should return 0.""" + results = rule_runner.run_playbook(NO_COMMAND) + assert len(results) == 0 diff --git a/src/ansiblelint/rules/command_instead_of_shell.md b/src/ansiblelint/rules/command_instead_of_shell.md new file mode 100644 index 0000000..0abf69d --- /dev/null +++ b/src/ansiblelint/rules/command_instead_of_shell.md @@ -0,0 +1,30 @@ +# command-instead-of-shell + +This rule identifies uses of `shell` modules instead of a `command` one when +this is not really needed. 
Shell is considerably slower than command and should +be avoided unless there is a special need for using shell features, like +environment variable expansion or chaining multiple commands using pipes. + +## Problematic Code + +```yaml +--- +- name: Problematic example + hosts: localhost + tasks: + - name: Echo a message + ansible.builtin.shell: echo hello # <-- command is better in this case + changed_when: false +``` + +## Correct Code + +```yaml +--- +- name: Correct example + hosts: localhost + tasks: + - name: Echo a message + ansible.builtin.command: echo hello + changed_when: false +``` diff --git a/src/ansiblelint/rules/command_instead_of_shell.py b/src/ansiblelint/rules/command_instead_of_shell.py new file mode 100644 index 0000000..d1cb5b1 --- /dev/null +++ b/src/ansiblelint/rules/command_instead_of_shell.py @@ -0,0 +1,177 @@ +"""Implementation of command-instead-of-shell rule.""" +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +FAIL_PLAY = """--- +- name: Fixture + hosts: localhost + tasks: + - name: Shell no pipe + ansible.builtin.shell: + cmd: echo hello + changed_when: false + + - name: Shell with jinja filter + ansible.builtin.shell: + cmd: echo {{ "hello" | upper }} + changed_when: false + + - name: Shell with jinja filter (fqcn) + ansible.builtin.shell: + cmd: echo {{ "hello" | upper }} + changed_when: false + + - name: Command with executable parameter + ansible.builtin.shell: + cmd: clear + args: + executable: /bin/bash + changed_when: false +""" + +SUCCESS_PLAY = """--- +- name: Fixture + hosts: localhost + tasks: + - name: Shell with pipe + ansible.builtin.shell: + cmd: echo hello | true # noqa: risky-shell-pipe + changed_when: false + + - name: Shell with redirect + ansible.builtin.shell: + cmd: echo hello > /tmp/hello + changed_when: false + + - name: Chain two shell commands + ansible.builtin.shell: + cmd: echo hello && echo goodbye + changed_when: false + + - name: Run commands in succession + ansible.builtin.shell: + cmd: echo hello ; echo goodbye + changed_when: false + + - name: Use variables + ansible.builtin.shell: + cmd: echo $HOME $USER + changed_when: false + + - name: Use * for globbing + ansible.builtin.shell: + cmd: ls foo* + changed_when: false + + - name: Use ? 
for globbing + ansible.builtin.shell: + cmd: ls foo? + changed_when: false + + - name: Use [] for globbing + ansible.builtin.shell: + cmd: ls foo[1,2,3] + changed_when: false + + - name: Use shell generator + ansible.builtin.shell: + cmd: ls foo{.txt,.xml} + changed_when: false + + - name: Use backticks + ansible.builtin.shell: + cmd: ls `ls foo*` + changed_when: false + + - name: Use shell with cmd + ansible.builtin.shell: + cmd: | + set -x + ls foo? + changed_when: false +""" + + +class UseCommandInsteadOfShellRule(AnsibleLintRule): + """Use shell only when shell functionality is required.""" + + id = "command-instead-of-shell" + description = ( + "Shell should only be used when piping, redirecting " + "or chaining commands (and Ansible would be preferred " + "for some of those!)" + ) + severity = "HIGH" + tags = ["command-shell", "idiom"] + version_added = "historic" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + # Use unjinja so that we don't match on jinja filters + # rather than pipes + if task["action"]["__ansible_module__"] in ["shell", "ansible.builtin.shell"]: + # Since Ansible 2.4, the `command` module does not accept setting + # the `executable`. If the user needs to set it, they have to use + # the `shell` module. + if "executable" in task["action"]: + return False + + if "cmd" in task["action"]: + jinja_stripped_cmd = self.unjinja(task["action"].get("cmd", [])) + else: + jinja_stripped_cmd = self.unjinja( + " ".join(task["action"].get("__ansible_arguments__", [])) + ) + return not any(ch in jinja_stripped_cmd for ch in "&|<>;$\n*[]{}?`") + return False + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("text", "expected"), + ( + pytest.param(SUCCESS_PLAY, 0, id="good"), + pytest.param(FAIL_PLAY, 3, id="bad"), + ), + ) + def test_rule_command_instead_of_shell( + default_text_runner: RunFromText, text: str, expected: int + ) -> None: + """Validate that rule works as intended.""" + results = default_text_runner.run_playbook(text) + for result in results: + assert result.rule.id == UseCommandInsteadOfShellRule.id, result + assert len(results) == expected diff --git a/src/ansiblelint/rules/conftest.py b/src/ansiblelint/rules/conftest.py new file mode 100644 index 0000000..f4df7a5 --- /dev/null +++ b/src/ansiblelint/rules/conftest.py @@ -0,0 +1,3 @@ +"""Makes pytest fixtures available.""" +# pylint: disable=wildcard-import,unused-wildcard-import +from ansiblelint.testing.fixtures import * # noqa: F403 diff --git a/src/ansiblelint/rules/custom/__init__.py b/src/ansiblelint/rules/custom/__init__.py new file mode 100644 index 0000000..8c3e048 --- /dev/null +++ b/src/ansiblelint/rules/custom/__init__.py @@ -0,0 +1 @@ +"""A placeholder package for putting custom rules under this dir.""" diff --git a/src/ansiblelint/rules/deprecated_bare_vars.md b/src/ansiblelint/rules/deprecated_bare_vars.md new file mode 100644 index 0000000..9e2f15b --- /dev/null +++ b/src/ansiblelint/rules/deprecated_bare_vars.md @@ -0,0 +1,32 @@ +# deprecated-bare-vars + +This rule identifies possible confusing expressions where it is not clear if +a variable or string is to be used and asks for clarification. + +You should either use the full variable syntax ('{{{{ {0} }}}}') or, whenever +possible, convert it to a list of strings. 
+ +## Problematic code + +```yaml +--- +- ansible.builtin.debug: + msg: "{{ item }}" + with_items: foo # <-- deprecated-bare-vars +``` + +## Correct code + +```yaml +--- +# if foo is not really a variable: +- ansible.builtin.debug: + msg: "{{ item }}" + with_items: + - foo + +# if foo is a variable: +- ansible.builtin.debug: + msg: "{{ item }}" + with_items: "{{ foo }}" +``` diff --git a/src/ansiblelint/rules/deprecated_bare_vars.py b/src/ansiblelint/rules/deprecated_bare_vars.py new file mode 100644 index 0000000..40912f8 --- /dev/null +++ b/src/ansiblelint/rules/deprecated_bare_vars.py @@ -0,0 +1,117 @@ +"""Implementation of deprecated-bare-vars rule.""" + +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +from __future__ import annotations + +import os +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.text import has_glob, has_jinja + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class UsingBareVariablesIsDeprecatedRule(AnsibleLintRule): + """Using bare variables is deprecated.""" + + id = "deprecated-bare-vars" + description = ( + "Using bare variables is deprecated. Update your " + "playbooks so that the environment value uses the full variable " + "syntax ``{{ your_variable }}``" + ) + severity = "VERY_HIGH" + tags = ["deprecations"] + version_added = "historic" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + loop_type = next((key for key in task if key.startswith("with_")), None) + if loop_type: + if loop_type in [ + "with_nested", + "with_together", + "with_flattened", + "with_filetree", + "with_community.general.filetree", + ]: + # These loops can either take a list defined directly in the task + # or a variable that is a list itself. When a single variable is used + # we just need to check that one variable, and not iterate over it like + # it's a list. Otherwise, loop through and check all items. 
+ items = task[loop_type] + if not isinstance(items, (list, tuple)): + items = [items] + for var in items: + return self._matchvar(var, task, loop_type) + elif loop_type == "with_subelements": + return self._matchvar(task[loop_type][0], task, loop_type) + elif loop_type in ["with_sequence", "with_ini", "with_inventory_hostnames"]: + pass + else: + return self._matchvar(task[loop_type], task, loop_type) + return False + + def _matchvar( + self, varstring: str, task: dict[str, Any], loop_type: str + ) -> bool | str: + if isinstance(varstring, str) and not has_jinja(varstring): + valid = loop_type == "with_fileglob" and bool( + has_jinja(varstring) or has_glob(varstring) + ) + + valid |= loop_type == "with_filetree" and bool( + has_jinja(varstring) or varstring.endswith(os.sep) + ) + if not valid: + message = ( + "Possible bare variable '{0}' used in a '{1}' loop." + + " You should use the full variable syntax ('{{{{ {0} }}}}') or convert it to a list if that is not really a variable." + ) + return message.format(task[loop_type], loop_type) + return False + + +if "pytest" in sys.modules: + from ansiblelint.rules import RulesCollection + from ansiblelint.runner import Runner + + def test_use_bare_positive() -> None: + """Positive test for deprecated-bare-vars.""" + collection = RulesCollection() + collection.register(UsingBareVariablesIsDeprecatedRule()) + success = "examples/playbooks/rule-deprecated-bare-vars-pass.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + def test_use_bare_negative() -> None: + """Negative test for deprecated-bare-vars.""" + collection = RulesCollection() + collection.register(UsingBareVariablesIsDeprecatedRule()) + failure = "examples/playbooks/rule-deprecated-bare-vars-fail.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 12 diff --git a/src/ansiblelint/rules/deprecated_command_syntax.md b/src/ansiblelint/rules/deprecated_command_syntax.md new file mode 100644 index 0000000..f99ca99 --- /dev/null +++ b/src/ansiblelint/rules/deprecated_command_syntax.md @@ -0,0 +1,32 @@ +# deprecated-command-syntax + +This rule identifies the use of shorthand (free-form) syntax as this is highly +discouraged inside playbooks, mainly because it can easily lead to bugs that +are hard to identify. + +While using the free-form from the command line is ok, it should never be used +inside playbooks. 
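+If you prefer to avoid free-form text entirely, the `command` module also accepts
+its options as explicit keys. A minimal sketch, equivalent to the Correct Code
+shown further below:
+
+```yaml
+- name: Perform chmod
+  ansible.builtin.command:
+    cmd: chmod 644 A
+    creates: B
+```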
+ +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Perform chmod + ansible.builtin.command: creates=B chmod 644 A # <-- do not use shorthand +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Perform chmod + ansible.builtin.command: chmod 644 A + args: + creates: B +``` diff --git a/src/ansiblelint/rules/deprecated_command_syntax.py b/src/ansiblelint/rules/deprecated_command_syntax.py new file mode 100644 index 0000000..53ed94f --- /dev/null +++ b/src/ansiblelint/rules/deprecated_command_syntax.py @@ -0,0 +1,101 @@ +"""Implementation of deprecated-command-syntax rule.""" +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +from __future__ import annotations + +import os +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.utils import convert_to_boolean, get_first_cmd_arg + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class CommandsInsteadOfArgumentsRule(AnsibleLintRule): + """Using command rather than an argument to e.g. 
file.""" + + id = "deprecated-command-syntax" + description = ( + "Executing a command when there are arguments to modules " + "is generally a bad idea" + ) + severity = "VERY_HIGH" + tags = ["command-shell", "deprecations"] + version_added = "historic" + + _commands = ["command", "shell", "raw"] + _arguments = { + "chown": "owner", + "chmod": "mode", + "chgrp": "group", + "ln": "state=link", + "mkdir": "state=directory", + "rmdir": "state=absent", + "rm": "state=absent", + } + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["action"]["__ansible_module__"] in self._commands: + first_cmd_arg = get_first_cmd_arg(task) + if not first_cmd_arg: + return False + + executable = os.path.basename(first_cmd_arg) + if executable in self._arguments and convert_to_boolean( + task["action"].get("warn", True) + ): + message = "{0} used in place of argument {1} to file module" + return message.format(executable, self._arguments[executable]) + return False + + +DEPRECATED_COMMAND_PLAY = """--- +- name: Fixture + hosts: localhost + tasks: + - name: Shell with pipe + ansible.builtin.command: + err: echo hello | true # noqa: risky-shell-pipe + changed_when: false +""" + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("text", "expected"), + (pytest.param(DEPRECATED_COMMAND_PLAY, 0, id="no_first_cmd_arg"),), + ) + def test_rule_deprecated_command_no_first_cmd_arg( + default_text_runner: RunFromText, text: str, expected: int + ) -> None: + """Validate that rule works as intended.""" + results = default_text_runner.run_playbook(text) + assert len(results) == expected diff --git a/src/ansiblelint/rules/deprecated_local_action.md b/src/ansiblelint/rules/deprecated_local_action.md new file mode 100644 index 0000000..d5e8361 --- /dev/null +++ b/src/ansiblelint/rules/deprecated_local_action.md @@ -0,0 +1,21 @@ +# deprecated-local-action + +This rule recommends using `delegate_to: localhost` instead of the +`local_action`. + +## Problematic Code + +```yaml +--- +- name: Task example + local_action: # <-- this is deprecated + module: boto3_facts +``` + +## Correct Code + +```yaml +- name: Task example + boto3_facts: + delegate_to: localhost # <-- recommended way to run on localhost +``` diff --git a/src/ansiblelint/rules/deprecated_local_action.py b/src/ansiblelint/rules/deprecated_local_action.py new file mode 100644 index 0000000..c77e208 --- /dev/null +++ b/src/ansiblelint/rules/deprecated_local_action.py @@ -0,0 +1,62 @@ +"""Implementation for deprecated-local-action rule.""" +# Copyright (c) 2016, Tsukinowa Inc. 
<info@tsukinowa.jp> +# Copyright (c) 2018, Ansible Project +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class TaskNoLocalAction(AnsibleLintRule): + """Do not use 'local_action', use 'delegate_to: localhost'.""" + + id = "deprecated-local-action" + description = "Do not use ``local_action``, use ``delegate_to: localhost``" + needs_raw_task = True + severity = "MEDIUM" + tags = ["deprecations"] + version_added = "v4.0.0" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + """Return matches for a task.""" + raw_task = task["__raw_task__"] + if "local_action" in raw_task.keys(): + return True + + return False + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.testing import RunFromText + + FAIL_TASK = """ + - name: Task example + local_action: + module: boto3_facts + """ + + SUCCESS_TASK = """ + - name: Task example + boto3_facts: + delegate_to: localhost # local_action + """ + + @pytest.mark.parametrize(("text", "expected"), ((SUCCESS_TASK, 0), (FAIL_TASK, 1))) + def test_local_action(text: str, expected: int) -> None: + """Positive test deprecated_local_action.""" + collection = RulesCollection() + collection.register(TaskNoLocalAction()) + runner = RunFromText(collection) + results = runner.run_role_tasks_main(text) + assert len(results) == expected diff --git a/src/ansiblelint/rules/deprecated_module.md b/src/ansiblelint/rules/deprecated_module.md new file mode 100644 index 0000000..c05d641 --- /dev/null +++ b/src/ansiblelint/rules/deprecated_module.md @@ -0,0 +1,32 @@ +# deprecated-module + +This rule identifies deprecated modules in playbooks. +You should avoid using deprecated modules because they are not maintained, which can pose a security risk. +Additionally when a module is deprecated it is available temporarily with a plan for future removal. + +Refer to the [Ansible module index](https://docs.ansible.com/ansible/latest/collections/index_module.html) for information about replacements and removal dates for deprecated modules. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Configure VLAN ID + ansible.netcommon.net_vlan: # <- Uses a deprecated module. + vlan_id: 20 +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Configure VLAN ID + dellemc.enterprise_sonic.sonic_vlans: # <- Uses a platform specific module. + config: + - vlan_id: 20 +``` diff --git a/src/ansiblelint/rules/deprecated_module.py b/src/ansiblelint/rules/deprecated_module.py new file mode 100644 index 0000000..00a4523 --- /dev/null +++ b/src/ansiblelint/rules/deprecated_module.py @@ -0,0 +1,78 @@ +"""Implementation of deprecated-module rule.""" +# Copyright (c) 2018, Ansible Project + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class DeprecatedModuleRule(AnsibleLintRule): + """Deprecated module.""" + + id = "deprecated-module" + description = ( + "These are deprecated modules, some modules are kept " + "temporarily for backwards compatibility but usage is discouraged." 
+ ) + link = "https://docs.ansible.com/ansible/latest/collections/index_module.html" + severity = "HIGH" + tags = ["deprecations"] + version_added = "v4.0.0" + + _modules = [ + # spell-checker:disable + "accelerate", + "aos_asn_pool", + "aos_blueprint", + "aos_blueprint_param", + "aos_blueprint_virtnet", + "aos_device", + "aos_external_router", + "aos_ip_pool", + "aos_logical_device", + "aos_logical_device_map", + "aos_login", + "aos_rack_type", + "aos_template", + "azure", + "cl_bond", + "cl_bridge", + "cl_img_install", + "cl_interface", + "cl_interface_policy", + "cl_license", + "cl_ports", + "cs_nic", + "docker", + "ec2_ami_find", + "ec2_ami_search", + "ec2_remote_facts", + "ec2_vpc", + "kubernetes", + "netscaler", + "nxos_ip_interface", + "nxos_mtu", + "nxos_portchannel", + "nxos_switchport", + "oc", + "panos_nat_policy", + "panos_security_policy", + "vsphere_guest", + "win_msi", + "include", + # spell-checker:enable + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + module = task["action"]["__ansible_module__"] + if module in self._modules: + message = "{0} {1}" + return message.format(self.shortdesc, module) + return False diff --git a/src/ansiblelint/rules/empty_string_compare.md b/src/ansiblelint/rules/empty_string_compare.md new file mode 100644 index 0000000..c20bc51 --- /dev/null +++ b/src/ansiblelint/rules/empty_string_compare.md @@ -0,0 +1,44 @@ +# empty-string-compare + +This rule checks for empty string comparison in playbooks. +To ensure code clarity you should avoid using empty strings in conditional statements with the `when` clause. + +- Use `when: var | length > 0` instead of `when: var != ""`. +- Use `when: var | length == 0` instead of `when: var == ""`. + +This is an opt-in rule. +You must enable it in your Ansible-lint configuration as follows: + +```yaml +enable_list: + - empty-string-compare +``` + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Shut down + ansible.builtin.command: /sbin/shutdown -t now + when: ansible_os_family == "" # <- Compares with an empty string. + - name: Shut down + ansible.builtin.command: /sbin/shutdown -t now + when: ansible_os_family !="" # <- Compares with an empty string. 
+``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Shut down + ansible.builtin.shell: | + /sbin/shutdown -t now + echo $var == + when: ansible_os_family +``` diff --git a/src/ansiblelint/rules/empty_string_compare.py b/src/ansiblelint/rules/empty_string_compare.py new file mode 100644 index 0000000..ae42ceb --- /dev/null +++ b/src/ansiblelint/rules/empty_string_compare.py @@ -0,0 +1,103 @@ +"""Implementation of empty-string-compare rule.""" +# Copyright (c) 2016, Will Thames and contributors +# Copyright (c) 2018, Ansible Project + +from __future__ import annotations + +import re +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.yaml_utils import nested_items_path + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class ComparisonToEmptyStringRule(AnsibleLintRule): + """Don't compare to empty string.""" + + id = "empty-string-compare" + description = ( + 'Use ``when: var|length > 0`` rather than ``when: var != ""`` (or ' + 'conversely ``when: var|length == 0`` rather than ``when: var == ""``)' + ) + severity = "HIGH" + tags = ["idiom", "opt-in"] + version_added = "v4.0.0" + + empty_string_compare = re.compile("[=!]= ?(\"{2}|'{2})") + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + for k, v, _ in nested_items_path(task): + if k == "when": + if isinstance(v, str): + if self.empty_string_compare.search(v): + return True + elif isinstance(v, bool): + pass + else: + for item in v: + if isinstance(item, str) and self.empty_string_compare.search( + item + ): + return True + + return False + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + SUCCESS_PLAY = """ +- hosts: all + tasks: + - name: Shut down + shell: | + /sbin/shutdown -t now + echo $var == "" + when: ansible_os_family + - name: Shut down + shell: | + /sbin/shutdown -t now + echo $var == "" + when: [ansible_os_family] +""" + + FAIL_PLAY = """ +- hosts: all + tasks: + - name: Shut down + command: /sbin/shutdown -t now + when: ansible_os_family == "" + - name: Shut down + command: /sbin/shutdown -t now + when: ansible_os_family !="" + - name: Shut down + command: /sbin/shutdown -t now + when: False +""" + + @pytest.mark.parametrize( + "rule_runner", (ComparisonToEmptyStringRule,), indirect=["rule_runner"] + ) + def test_rule_empty_string_compare_fail(rule_runner: RunFromText) -> None: + """Test rule matches.""" + results = rule_runner.run_playbook(FAIL_PLAY) + assert len(results) == 2 + for result in results: + assert result.message == ComparisonToEmptyStringRule().shortdesc + + @pytest.mark.parametrize( + "rule_runner", (ComparisonToEmptyStringRule,), indirect=["rule_runner"] + ) + def test_rule_empty_string_compare_pass(rule_runner: RunFromText) -> None: + """Test rule matches.""" + results = rule_runner.run_playbook(SUCCESS_PLAY) + assert len(results) == 0, results diff --git a/src/ansiblelint/rules/fqcn.md b/src/ansiblelint/rules/fqcn.md new file mode 100644 index 0000000..fc96ed2 --- /dev/null +++ b/src/ansiblelint/rules/fqcn.md @@ -0,0 +1,76 @@ +# fqcn + +This rule checks for fully-qualified collection names (FQCN) in Ansible content. + +Declaring an FQCN ensures that an action uses code from the correct namespace. 
+This avoids ambiguity and conflicts that can cause operations to fail or produce +unexpected results. + +The `fqcn` rule has the following checks: + +- `fqcn[action]` - Use FQCN for module actions, such ... +- `fqcn[action-core]` - Checks for FQCNs from the `ansible.legacy` or + `ansible.builtin` collection. +- `fqcn[canonical]` - You should use canonical module name ... instead of ... +- `fqcn[keyword]` - Avoid `collections` keyword by using FQCN for all plugins, + modules, roles and playbooks. + +!!! note + + In most cases you should declare the `ansible.builtin` collection for internal Ansible actions. + You should declare the `ansible.legacy` collection if you use local overrides with actions, such with as the ``shell`` module. + +!!! warning + + This rule does not take [`collections` keyword](https://docs.ansible.com/ansible/latest/collections_guide/collections_using_playbooks.html#simplifying-module-names-with-the-collections-keyword) into consideration for resolving content. + The `collections` keyword provided a temporary mechanism transitioning to Ansible 2.9. + You should rewrite any content that uses the `collections:` key and avoid it where possible. + +## Canonical module names + +Canonical module names are also known as **resolved module names** and they are +to be preferred for most cases. Many Ansible modules have multiple aliases and +redirects, as these were created over time while the content was refactored. +Still, all of them do finally resolve to the same module name, but not without +adding some performance overhead. As very old aliases are at some point removed, +it makes to just refresh the content to make it point to the current canonical +name. + +The only exception for using a canonical name is if your code still needs to be +compatible with a very old version of Ansible, one that does not know how to +resolve that name. If you find yourself in such a situation, feel free to add +this rule to the ignored list. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Create an SSH connection + shell: ssh ssh_user@{{ ansible_ssh_host }} # <- Does not use the FQCN for the shell module. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook (1st solution) + hosts: all + tasks: + - name: Create an SSH connection + # Use the FQCN for the legacy shell module and allow local overrides. + ansible.legacy.shell: + ssh ssh_user@{{ ansible_ssh_host }} -o IdentityFile=path/to/my_rsa +``` + +```yaml +--- +- name: Example playbook (2nd solution) + hosts: all + tasks: + - name: Create an SSH connection + # Use the FQCN for the builtin shell module. 
+ ansible.builtin.shell: ssh ssh_user@{{ ansible_ssh_host }} +``` diff --git a/src/ansiblelint/rules/fqcn.py b/src/ansiblelint/rules/fqcn.py new file mode 100644 index 0000000..64f4ee3 --- /dev/null +++ b/src/ansiblelint/rules/fqcn.py @@ -0,0 +1,202 @@ +"""Rule definition for usage of fully qualified collection names for builtins.""" +from __future__ import annotations + +import logging +import sys +from typing import Any + +from ansible.plugins.loader import module_loader + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule, RulesCollection + +_logger = logging.getLogger(__name__) + +builtins = [ + # spell-checker:disable + "add_host", + "apt", + "apt_key", + "apt_repository", + "assemble", + "assert", + "async_status", + "blockinfile", + "command", + "copy", + "cron", + "debconf", + "debug", + "dnf", + "dpkg_selections", + "expect", + "fail", + "fetch", + "file", + "find", + "gather_facts", + "get_url", + "getent", + "git", + "group", + "group_by", + "hostname", + "import_playbook", + "import_role", + "import_tasks", + "include", + "include_role", + "include_tasks", + "include_vars", + "iptables", + "known_hosts", + "lineinfile", + "meta", + "package", + "package_facts", + "pause", + "ping", + "pip", + "raw", + "reboot", + "replace", + "rpm_key", + "script", + "service", + "service_facts", + "set_fact", + "set_stats", + "setup", + "shell", + "slurp", + "stat", + "subversion", + "systemd", + "sysvinit", + "tempfile", + "template", + "unarchive", + "uri", + "user", + "wait_for", + "wait_for_connection", + "yum", + "yum_repository", + # spell-checker:enable +] + + +class FQCNBuiltinsRule(AnsibleLintRule): + """Use FQCN for builtin actions.""" + + id = "fqcn" + severity = "MEDIUM" + description = ( + "Check whether actions are using using full qualified collection names." + ) + tags = ["formatting"] + version_added = "v6.8.0" + module_aliases: dict[str, str] = {"block/always/rescue": "block/always/rescue"} + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + result = [] + module = task["action"]["__ansible_module_original__"] + + if module not in self.module_aliases: + loaded_module = module_loader.find_plugin_with_context(module) + target = loaded_module.resolved_fqcn + self.module_aliases[module] = target + if target is None: + _logger.warning("Unable to resolve FQCN for module %s", module) + self.module_aliases[module] = module + return [] + if target not in self.module_aliases: + self.module_aliases[target] = target + + if module != self.module_aliases[module]: + module_alias = self.module_aliases[module] + if module_alias.startswith("ansible.builtin"): + legacy_module = module_alias.replace( + "ansible.builtin.", "ansible.legacy.", 1 + ) + if module != legacy_module: + result.append( + self.create_matcherror( + message=f"Use FQCN for builtin module actions ({module}).", + details=f"Use `{module_alias}` or `{legacy_module}` instead.", + filename=file, + linenumber=task["__line__"], + tag="fqcn[action-core]", + ) + ) + else: + if module.count(".") < 2: + result.append( + self.create_matcherror( + message=f"Use FQCN for module actions, such `{self.module_aliases[module]}`.", + details=f"Action `{module}` is not FQCN.", + filename=file, + linenumber=task["__line__"], + tag="fqcn[action]", + ) + ) + # TODO(ssbarnea): Remove the c.g. and c.n. exceptions from here once + # community team is flattening these. 
+ # See: https://github.com/ansible-community/community-topics/issues/147 + elif not module.startswith("community.general.") or module.startswith( + "community.network." + ): + result.append( + self.create_matcherror( + message=f"You should use canonical module name `{self.module_aliases[module]}` instead of `{module}`.", + filename=file, + linenumber=task["__line__"], + tag="fqcn[canonical]", + ) + ) + return result + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + if file.kind != "playbook": + return [] + if "collections" in data: + return [ + self.create_matcherror( + message="Avoid `collections` keyword by using FQCN for all plugins, modules, roles and playbooks.", + linenumber=data[LINE_NUMBER_KEY], + tag="fqcn[keyword]", + filename=file, + ) + ] + return [] + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + def test_fqcn_builtin_fail() -> None: + """Test rule matches.""" + collection = RulesCollection() + collection.register(FQCNBuiltinsRule()) + success = "examples/playbooks/rule-fqcn-fail.yml" + results = Runner(success, rules=collection).run() + assert len(results) == 3 + assert results[0].tag == "fqcn[keyword]" + assert "Avoid `collections` keyword" in results[0].message + assert results[1].tag == "fqcn[action-core]" + assert "Use FQCN for builtin module actions" in results[1].message + assert results[2].tag == "fqcn[action]" + assert "Use FQCN for module actions, such" in results[2].message + + def test_fqcn_builtin_pass() -> None: + """Test rule does not match.""" + collection = RulesCollection() + collection.register(FQCNBuiltinsRule()) + success = "examples/playbooks/rule-fqcn-pass.yml" + results = Runner(success, rules=collection).run() + assert len(results) == 0, results diff --git a/src/ansiblelint/rules/galaxy.md b/src/ansiblelint/rules/galaxy.md new file mode 100644 index 0000000..b974d87 --- /dev/null +++ b/src/ansiblelint/rules/galaxy.md @@ -0,0 +1,108 @@ +# galaxy + +This rule identifies if the collection version mentioned in galaxy.yml is ideal +in terms of the version number being greater than or equal to `1.0.0`. + +This rule looks for a changelog file in expected locations, detailed below in +the Changelog Details section. + +This rule checks to see if the `galaxy.yml` file includes one of the required +tags for certification on Automation Hub. Additional custom tags can be added, +but one or more of these tags must be present for certification. + +The tag list is as follows: `application`, `cloud`,`database`, `infrastructure`, +`linux`, `monitoring`, `networking`, `security`,`storage`, `tools`, `windows`. + +This rule can produce messages such: + +- `galaxy[version-missing]` - `galaxy.yaml` should have version tag. +- `galaxy[version-incorrect]` - collection version should be greater than or + equal to `1.0.0` +- `galaxy[no-changelog]` - collection is missing a changelog file in expected + locations. +- `galaxy[tags]` - `galaxy.yaml` must have one of the required tags: + `application`, `cloud`, `database`, `infrastructure`, `linux`, `monitoring`, + `networking`, `security`, `storage`, `tools`, `windows`. + +If you want to ignore some of the messages above, you can add any of them to the +`ignore_list`. 
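+For example, a configuration snippet like the one below downgrades some of these
+checks to warnings or skips them entirely (illustrative entries only; `warn_list`
+and `skip_list` are the standard configuration keys):
+
+```yaml
+# .ansible-lint (illustrative)
+warn_list:
+  - galaxy[no-changelog] # report but do not fail
+skip_list:
+  - galaxy[version-incorrect] # silence entirely
+```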
+ +## Problematic code + +```yaml +# galaxy.yml +--- +name: foo +namespace: bar +version: 0.2.3 # <-- collection version should be >= 1.0.0 +authors: + - John +readme: ../README.md +description: "..." +``` + +## Correct code + +```yaml +# galaxy.yml +--- +name: foo +namespace: bar +version: 1.0.0 +authors: + - John +readme: ../README.md +description: "..." +``` + +# Changelog Details + +This rule expects a `CHANGELOG.md` or `.rst` file in the collection root or a +`changelogs/changelog.yaml` file. + +If a `changelogs/changelog.yaml` file exists, the schema will be checked. + +## Minimum required changelog.yaml file + +```yaml +# changelog.yaml +--- +releases: {} +``` + +# Required Tag Details + +## Problematic code + +```yaml +# galaxy.yml +--- +namespace: bar +name: foo +version: 1.0.0 +authors: + - John +readme: ../README.md +description: "..." +license: + - Apache-2.0 +repository: https://github.com/ORG/REPO_NAME +``` + +## Correct code + +```yaml +# galaxy.yml +--- +namespace: bar +name: foo +version: 1.0.0 +authors: + - John +readme: ../README.md +description: "..." +license: + - Apache-2.0 +repository: https://github.com/ORG/REPO_NAME +tags: [networking, test_tag, test_tag_2] +``` diff --git a/src/ansiblelint/rules/galaxy.py b/src/ansiblelint/rules/galaxy.py new file mode 100644 index 0000000..cdaf4ed --- /dev/null +++ b/src/ansiblelint/rules/galaxy.py @@ -0,0 +1,232 @@ +"""Implementation of GalaxyRule.""" +from __future__ import annotations + +import os +import sys +from functools import total_ordering +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class GalaxyRule(AnsibleLintRule): + """Rule for checking collection version is greater than 1.0.0 and checking for changelog.""" + + id = "galaxy" + description = "Confirm via galaxy.yml file if collection version is greater than or equal to 1.0.0 and check for changelog." + severity = "MEDIUM" + tags = ["metadata"] + version_added = "v6.11.0 (last update)" + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + """Return matches found for a specific play (entry in playbook).""" + if file.kind != "galaxy": # type: ignore + return [] + + # Defined by Automation Hub Team and Partner Engineering + required_tag_list = [ + "application", + "cloud", + "database", + "infrastructure", + "linux", + "monitoring", + "networking", + "security", + "storage", + "tools", + "windows", + ] + + results = [] + + base_path = os.path.split(str(file.abspath))[0] + changelog_found = 0 + changelog_paths = [ + os.path.join(base_path, "changelogs", "changelog.yaml"), + os.path.join(base_path, "CHANGELOG.rst"), + os.path.join(base_path, "CHANGELOG.md"), + ] + + for path in changelog_paths: + if os.path.isfile(path): + changelog_found = 1 + + galaxy_tag_list = data.get("tags", None) + + # Changelog Check - building off Galaxy rule as there is no current way to check + # for a nonexistent file + if not changelog_found: + results.append( + self.create_matcherror( + message="No changelog found. Please add a changelog file. 
Refer to the galaxy.md file for more info.", + tag="galaxy[no-changelog]", + filename=file, + ) + ) + + # Checking if galaxy.yml contains one or more required tags for certification + if not galaxy_tag_list or not any( + tag in required_tag_list for tag in galaxy_tag_list + ): + results.append( + self.create_matcherror( + message=( + f"galaxy.yaml must have one of the required tags: {required_tag_list}" + ), + tag="galaxy[tags]", + filename=file, + ) + ) + + if "version" not in data: + results.append( + self.create_matcherror( + message="galaxy.yaml should have version tag.", + linenumber=data[LINE_NUMBER_KEY], + tag="galaxy[version-missing]", + filename=file, + ) + ) + return results + # returning here as it does not make sense + # to continue for version check below + + version = data.get("version") + if Version(version) < Version("1.0.0"): + results.append( + self.create_matcherror( + message="collection version should be greater than or equal to 1.0.0", + # pylint: disable=protected-access + linenumber=version._line_number, + tag="galaxy[version-incorrect]", + filename=file, + ) + ) + + return results + + +@total_ordering +class Version: + """Simple class to compare arbitrary versions.""" + + def __init__(self, version_string: str): + """Construct a Version object.""" + self.components = version_string.split(".") + + def __eq__(self, other: object) -> bool: + """Implement equality comparison.""" + try: + other = _coerce(other) + except NotImplementedError: + return NotImplemented + + return self.components == other.components + + def __lt__(self, other: Version) -> bool: + """Implement lower-than operation.""" + other = _coerce(other) + + return self.components < other.components + + +def _coerce(other: object) -> Version: + if isinstance(other, str): + other = Version(other) + if isinstance(other, (int, float)): + other = Version(str(other)) + if isinstance(other, Version): + return other + raise NotImplementedError(f"Unable to coerce object type {type(other)} to Version") + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner + + def test_galaxy_collection_version_positive() -> None: + """Positive test for collection version in galaxy.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + success = "examples/collection/galaxy.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + def test_galaxy_collection_version_negative() -> None: + """Negative test for collection version in galaxy.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + failure = "examples/meta/galaxy.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 1 + + def test_galaxy_no_collection_version() -> None: + """Test for no collection version in galaxy.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + failure = "examples/no_collection_version/galaxy.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 1 + + def test_changelog_present() -> None: + """Positive test for finding a changelog.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + good_runner = Runner("examples/collection/galaxy.yml", rules=collection) + assert [] == good_runner.run() + + def test_changelog_missing() -> None: + """Negative test for finding a changelog.""" + collection = RulesCollection() + 
collection.register(GalaxyRule()) + bad_runner = Runner("examples/no_changelog/galaxy.yml", rules=collection) + result = bad_runner.run() + assert len(result) == 1 + for item in result: + assert item.tag == "galaxy[no-changelog]" + + def test_version_class() -> None: + """Test for version class.""" + v = Version("1.0.0") + assert v == Version("1.0.0") + assert v != NotImplemented + + def test_coerce() -> None: + """Test for _coerce function.""" + assert _coerce("1.0") == Version("1.0") + assert _coerce(1.0) == Version("1.0") + expected = "Unable to coerce object type" + with pytest.raises(NotImplementedError, match=expected): + _coerce(type(Version)) + + def test_galaxy_tags_pass() -> None: + """Test for required tags.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + bad_runner = Runner( + "examples/galaxy_no_required_tags/pass/galaxy.yml", rules=collection + ) + result = bad_runner.run() + assert len(result) == 0 + + def test_galaxy_tags_fail() -> None: + """Test for required tags.""" + collection = RulesCollection() + collection.register(GalaxyRule()) + bad_runner = Runner( + "examples/galaxy_no_required_tags/fail/galaxy.yml", rules=collection + ) + result = bad_runner.run() + assert len(result) == 1 + for item in result: + assert item.tag == "galaxy[tags]" diff --git a/src/ansiblelint/rules/ignore_errors.md b/src/ansiblelint/rules/ignore_errors.md new file mode 100644 index 0000000..cb17774 --- /dev/null +++ b/src/ansiblelint/rules/ignore_errors.md @@ -0,0 +1,61 @@ +# ignore-errors + +This rule checks that playbooks do not use the `ignore_errors` directive to ignore all errors. +Ignoring all errors in a playbook hides actual failures, incorrectly mark tasks as failed, and result in unexpected side effects and behavior. + +Instead of using the `ignore_errors: true` directive, you should do the following: + +- Ignore errors only when using the `{{ ansible_check_mode }}` variable. +- Use `register` to register errors. +- Use `failed_when:` and specify acceptable error conditions. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Run apt-get update + ansible.builtin.command: apt-get update + ignore_errors: true # <- Ignores all errors, including important failures. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Run apt-get update + ansible.builtin.command: apt-get update + ignore_errors: "{{ ansible_check_mode }}" # <- Ignores errors in check mode. +``` + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Run apt-get update + ansible.builtin.command: apt-get update + ignore_errors: true + register: ignore_errors_register # <- Stores errors and failures for evaluation. +``` + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Disable apport + become: "yes" + lineinfile: + line: "enabled=0" + dest: /etc/default/apport + mode: 0644 + state: present + register: default_apport + failed_when: default_apport.rc !=0 and not default_apport.rc == 257 # <- Defines conditions that constitute a failure. 
+``` diff --git a/src/ansiblelint/rules/ignore_errors.py b/src/ansiblelint/rules/ignore_errors.py new file mode 100644 index 0000000..e9f7c40 --- /dev/null +++ b/src/ansiblelint/rules/ignore_errors.py @@ -0,0 +1,130 @@ +"""IgnoreErrorsRule used with ansible-lint.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class IgnoreErrorsRule(AnsibleLintRule): + """Use failed_when and specify error conditions instead of using ignore_errors.""" + + id = "ignore-errors" + description = ( + "Instead of ignoring all errors, ignore the errors only when using ``{{ ansible_check_mode }}``, " + "register the errors using ``register``, " + "or use ``failed_when:`` and specify acceptable error conditions " + "to reduce the risk of ignoring important failures." + ) + severity = "LOW" + tags = ["unpredictability"] + version_added = "v5.0.7" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if ( + task.get("ignore_errors") + and task.get("ignore_errors") != "{{ ansible_check_mode }}" + and not task.get("register") + ): + return True + + return False + + +if "pytest" in sys.modules: + import pytest + + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + IGNORE_ERRORS_TRUE = """ +- hosts: all + tasks: + - name: Run apt-get update + command: apt-get update + ignore_errors: true +""" + + IGNORE_ERRORS_FALSE = """ +- hosts: all + tasks: + - name: Run apt-get update + command: apt-get update + ignore_errors: false +""" + + IGNORE_ERRORS_CHECK_MODE = """ +- hosts: all + tasks: + - name: Run apt-get update + command: apt-get update + ignore_errors: "{{ ansible_check_mode }}" +""" + + IGNORE_ERRORS_REGISTER = """ +- hosts: all + tasks: + - name: Run apt-get update + command: apt-get update + ignore_errors: true + register: ignore_errors_register +""" + + FAILED_WHEN = """ +- hosts: all + tasks: + - name: Disable apport + become: 'yes' + lineinfile: + line: "enabled=0" + dest: /etc/default/apport + mode: 0644 + state: present + register: default_apport + failed_when: default_apport.rc !=0 and not default_apport.rc == 257 +""" + + @pytest.mark.parametrize( + "rule_runner", (IgnoreErrorsRule,), indirect=["rule_runner"] + ) + def test_ignore_errors_true(rule_runner: RunFromText) -> None: + """The task uses ignore_errors.""" + results = rule_runner.run_playbook(IGNORE_ERRORS_TRUE) + assert len(results) == 1 + + @pytest.mark.parametrize( + "rule_runner", (IgnoreErrorsRule,), indirect=["rule_runner"] + ) + def test_ignore_errors_false(rule_runner: RunFromText) -> None: + """The task uses ignore_errors: false, oddly enough.""" + results = rule_runner.run_playbook(IGNORE_ERRORS_FALSE) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (IgnoreErrorsRule,), indirect=["rule_runner"] + ) + def test_ignore_errors_check_mode(rule_runner: RunFromText) -> None: + """The task uses ignore_errors: "{{ ansible_check_mode }}".""" + results = rule_runner.run_playbook(IGNORE_ERRORS_CHECK_MODE) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (IgnoreErrorsRule,), indirect=["rule_runner"] + ) + def test_ignore_errors_register(rule_runner: RunFromText) -> None: + """The task uses ignore_errors: but output is registered and managed.""" + results = rule_runner.run_playbook(IGNORE_ERRORS_REGISTER) + assert len(results) == 0 + + @pytest.mark.parametrize( + 
"rule_runner", (IgnoreErrorsRule,), indirect=["rule_runner"] + ) + def test_failed_when(rule_runner: RunFromText) -> None: + """Instead of ignore_errors, this task uses failed_when.""" + results = rule_runner.run_playbook(FAILED_WHEN) + assert len(results) == 0 diff --git a/src/ansiblelint/rules/inline_env_var.md b/src/ansiblelint/rules/inline_env_var.md new file mode 100644 index 0000000..bc83f7e --- /dev/null +++ b/src/ansiblelint/rules/inline_env_var.md @@ -0,0 +1,38 @@ +# inline-env-var + +This rule checks that playbooks do not set environment variables in the `ansible.builtin.command` module. + +You should set environment variables with the `ansible.builtin.shell` module or the `environment` keyword. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Set environment variable + ansible.builtin.command: MY_ENV_VAR=my_value # <- Sets an environment variable in the command module. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Set environment variable + ansible.builtin.shell: echo $MY_ENV_VAR + environment: + MY_ENV_VAR: my_value # <- Sets an environment variable with the environment keyword. +``` + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Set environment variable + ansible.builtin.shell: MY_ENV_VAR=my_value # <- Sets an environment variable with the shell module. +``` diff --git a/src/ansiblelint/rules/inline_env_var.py b/src/ansiblelint/rules/inline_env_var.py new file mode 100644 index 0000000..7fd2aa0 --- /dev/null +++ b/src/ansiblelint/rules/inline_env_var.py @@ -0,0 +1,74 @@ +"""Implementation of inside-env-var rule.""" +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import FILENAME_KEY, LINE_NUMBER_KEY +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.utils import get_first_cmd_arg + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class EnvVarsInCommandRule(AnsibleLintRule): + """Command module does not accept setting environment variables inline.""" + + id = "inline-env-var" + description = ( + "Use ``environment:`` to set environment variables " + "or use ``shell`` module which accepts both" + ) + severity = "VERY_HIGH" + tags = ["command-shell", "idiom"] + version_added = "historic" + + expected_args = [ + "chdir", + "creates", + "executable", + "removes", + "stdin", + "warn", + "stdin_add_newline", + "strip_empty_ends", + "cmd", + "__ansible_module__", + "__ansible_module_original__", + "__ansible_arguments__", + LINE_NUMBER_KEY, + FILENAME_KEY, + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["action"]["__ansible_module__"] in ["command"]: + first_cmd_arg = get_first_cmd_arg(task) + if not first_cmd_arg: + return False + + return any( + [arg not in self.expected_args for arg in task["action"]] + + ["=" in first_cmd_arg] + ) + return False diff --git a/src/ansiblelint/rules/jinja.md b/src/ansiblelint/rules/jinja.md new file mode 100644 index 0000000..8e1732e --- /dev/null +++ b/src/ansiblelint/rules/jinja.md @@ -0,0 +1,55 @@ +# jinja + +This rule can report problems related to jinja2 string templates. The current +version can report: + +- `jinja[spacing]` when there are no spaces between variables + and operators, including filters, like `{{ var_name | filter }}`. This + improves readability and makes it less likely to introduce typos. +- `jinja[invalid]` when the jinja2 template is invalid, like `{{ {{ '1' }} }}`, + which would result in a runtime error if you try to use it with Ansible, even + if it does pass the Ansible syntax check. + +As jinja2 syntax is closely following Python one we aim to follow +[black](https://black.readthedocs.io/en/stable/) formatting rules. If you are +curious how black would reformat a small sniped feel free to visit +[online black formatter](https://black.vercel.app/) site. Keep in mind to not +include the entire jinja2 template, so instead of `{{ 1+2==3 }}`, do paste +only `1+2==3`. + +In ansible, `changed_when`, `failed_when`, `until`, `when` are considered to +use implicit jinja2 templating, meaning that they do not require `{{ }}`. Our +rule will suggest the removal of the braces for these fields. + +## Problematic code + +```yaml +--- +- name: Some task + vars: + foo: "{{some|dict2items}}" # <-- jinja[spacing] + bar: "{{ & }}" # <-- jinja[invalid] + when: "{{ foo | bool }}" # <-- jinja[spacing] - 'when' has implicit templating +``` + +## Correct code + +```yaml +--- +- name: Some task + vars: + foo: "{{ some | dict2items }}" + bar: "{{ '&' }}" + when: foo | bool +``` + +## Current limitations + +In its current form, this rule presents the following limitations: + +- Jinja2 blocks that have newlines in them will not be reformatted because we + consider that the user deliberately wanted to format them in a particular way. +- Jinja2 blocks that use tilde as a binary operation are ignored because black + does not support tilde as a binary operator. Example: `{{ a ~ b }}`. +- Jinja2 blocks that use dot notation with numbers are ignored because python + and black do not allow it. 
Example: `{{ foo.0.bar }}` diff --git a/src/ansiblelint/rules/jinja.py b/src/ansiblelint/rules/jinja.py new file mode 100644 index 0000000..8057fd4 --- /dev/null +++ b/src/ansiblelint/rules/jinja.py @@ -0,0 +1,675 @@ +"""Rule for checking content of jinja template strings.""" +from __future__ import annotations + +import logging +import re +import sys +from collections import namedtuple +from typing import TYPE_CHECKING, Any + +import black +import jinja2 +from ansible.errors import AnsibleError, AnsibleParserError +from ansible.parsing.yaml.objects import AnsibleUnicode +from jinja2.exceptions import TemplateSyntaxError + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.skip_utils import get_rule_skips_from_line +from ansiblelint.utils import parse_yaml_from_file, template +from ansiblelint.yaml_utils import deannotate, nested_items_path + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + + +_logger = logging.getLogger(__package__) +KEYWORDS_WITH_IMPLICIT_TEMPLATE = ("changed_when", "failed_when", "until", "when") + +Token = namedtuple("Token", "lineno token_type value") + +ignored_re = re.compile( + "|".join( + [ + r"^Object of type method is not JSON serializable", + r"^Unexpected templating type error occurred on", + r"^obj must be a list of dicts or a nested dict$", + r"^the template file (.*) could not be found for the lookup$", + r"could not locate file in lookup", + r"unable to locate collection", + ] + ) +) + + +class JinjaRule(AnsibleLintRule): + """Rule that looks inside jinja2 templates.""" + + id = "jinja" + severity = "LOW" + tags = ["formatting"] + version_added = "v6.5.0" + _ansible_error_re = re.compile( + r"^(?P<error>.*): (?P<detail>.*)\. String: (?P<string>.*)$", flags=re.MULTILINE + ) + + env = jinja2.Environment(trim_blocks=False) + _tag2msg = { + "invalid": "Syntax error in jinja2 template: {value}", + "spacing": "Jinja2 spacing could be improved: {value} -> {reformatted}", + } + + def _msg(self, tag: str, value: str, reformatted: str) -> str: + """Generate error message.""" + return self._tag2msg[tag].format(value=value, reformatted=reformatted) + + # pylint: disable=too-many-branches,too-many-locals + def matchtask( # noqa: C901 + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + result = [] + try: + for key, v, path in nested_items_path( + task, + ignored_keys=("block", "ansible.builtin.block", "ansible.legacy.block"), + ): + if isinstance(v, str): + try: + template( + basedir=file.dir if file else ".", + value=v, + variables=deannotate(task.get("vars", {})), + fail_on_error=True, # we later decide which ones to ignore or not + ) + # ValueError RepresenterError + except AnsibleError as exc: + bypass = False + orig_exc = ( + exc.orig_exc if getattr(exc, "orig_exc", None) else exc + ) + orig_exc_message = getattr(orig_exc, "message", str(orig_exc)) + match = self._ansible_error_re.match( + getattr(orig_exc, "message", str(orig_exc)) + ) + if ignored_re.match(orig_exc_message): + bypass = True + elif isinstance(orig_exc, AnsibleParserError): + # "An unhandled exception occurred while running the lookup plugin '...'. Error was a <class 'ansible.errors.AnsibleParserError'>, original message: Invalid filename: 'None'. Invalid filename: 'None'" + + # An unhandled exception occurred while running the lookup plugin 'template'. 
Error was a <class 'ansible.errors.AnsibleError'>, original message: the template file ... could not be found for the lookup. the template file ... could not be found for the lookup + + # ansible@devel (2.14) new behavior: + # AnsibleError(TemplateSyntaxError): template error while templating string: Could not load "ipwrap": 'Invalid plugin FQCN (ansible.netcommon.ipwrap): unable to locate collection ansible.netcommon'. String: Foo {{ buildset_registry.host | ipwrap }}. Could not load "ipwrap": 'Invalid plugin FQCN (ansible.netcommon.ipwrap): unable to locate collection ansible.netcommon' + bypass = True + elif ( + isinstance(orig_exc, (AnsibleError, TemplateSyntaxError)) + and match + ): + error = match.group("error") + detail = match.group("detail") + # string = match.group("string") + if error.startswith( + "template error while templating string" + ): + bypass = False + elif detail.startswith("unable to locate collection"): + _logger.debug("Ignored AnsibleError: %s", exc) + bypass = True + else: + bypass = False + elif re.match(r"^lookup plugin (.*) not found$", exc.message): + # lookup plugin 'template' not found + bypass = True + + # AnsibleFilterError: 'obj must be a list of dicts or a nested dict' + # AnsibleError: template error while templating string: expected token ':', got '}'. String: {{ {{ '1' }} }} + # AnsibleError: template error while templating string: unable to locate collection ansible.netcommon. String: Foo {{ buildset_registry.host | ipwrap }} + if not bypass: + result.append( + self.create_matcherror( + message=str(exc), + linenumber=_get_error_line(task, path), + filename=file, + tag=f"{self.id}[invalid]", + ) + ) + continue + reformatted, details, tag = self.check_whitespace( + v, key=key, lintable=file + ) + if reformatted != v: + result.append( + self.create_matcherror( + message=self._msg( + tag=tag, value=v, reformatted=reformatted + ), + linenumber=_get_error_line(task, path), + details=details, + filename=file, + tag=f"{self.id}[{tag}]", + ) + ) + except Exception as exc: + _logger.info("Exception in JinjaRule.matchtask: %s", exc) + raise + return result + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Return matches for variables defined in vars files.""" + data: dict[str, Any] = {} + raw_results: list[MatchError] = [] + results: list[MatchError] = [] + + if str(file.kind) == "vars": + data = parse_yaml_from_file(str(file.path)) + # pylint: disable=unused-variable + for key, v, path in nested_items_path(data): + if isinstance(v, AnsibleUnicode): + reformatted, details, tag = self.check_whitespace( + v, key=key, lintable=file + ) + if reformatted != v: + results.append( + self.create_matcherror( + message=self._msg( + tag=tag, value=v, reformatted=reformatted + ), + linenumber=v.ansible_pos[1], + details=details, + filename=file, + tag=f"{self.id}[{tag}]", + ) + ) + if raw_results: + lines = file.content.splitlines() + for match in raw_results: + # linenumber starts with 1, not zero + skip_list = get_rule_skips_from_line(lines[match.linenumber - 1]) + if match.rule.id not in skip_list and match.tag not in skip_list: + results.append(match) + else: + results.extend(super().matchyaml(file)) + return results + + def lex(self, text: str) -> list[Token]: + """Parse jinja template.""" + # https://github.com/pallets/jinja/issues/1711 + self.env.keep_trailing_newline = True + + self.env.lstrip_blocks = False + self.env.trim_blocks = False + tokens = [ + Token(lineno=t[0], token_type=t[1], value=t[2]) for t in self.env.lex(text) + ] + new_text = 
self.unlex(tokens) + if text != new_text: + _logger.debug( + "Unable to perform full roundtrip lex-unlex on jinja template (expected when '-' modifier is used): {text} -> {new_text}" + ) + return tokens + + def unlex(self, tokens: list[Token]) -> str: + """Return original text by compiling the lex output.""" + result = "" + last_lineno = 1 + last_value = "" + for lineno, _, value in tokens: + if lineno > last_lineno and "\n" not in last_value: + result += "\n" + result += value + last_lineno = lineno + last_value = value + return result + + # pylint: disable=too-many-branches,too-many-statements,too-many-locals + def check_whitespace( # noqa: max-complexity: 13 + self, text: str, key: str, lintable: Lintable | None = None + ) -> tuple[str, str, str]: + """Check spacing inside given jinja2 template string. + + We aim to match Python Black formatting rules. + :raises NotImplementedError: On few cases where valid jinja is not valid Python. + + :returns: (string, string, string) reformatted text, detailed error, error tag + """ + + def cook(value: str, implicit: bool = False) -> str: + """Prepare an implicit string for jinja parsing when needed.""" + if not implicit: + return value + if value.startswith("{{") and value.endswith("}}"): + # maybe we should make this an error? + return value + return f"{{{{ {value} }}}}" + + def uncook(value: str, implicit: bool = False) -> str: + """Restore an string to original form when it was an implicit one.""" + if not implicit: + return value + return value[3:-3] + + tokens = [] + details = "" + begin_types = ("variable_begin", "comment_begin", "block_begin") + end_types = ("variable_end", "comment_end", "block_end") + implicit = False + + # implicit templates do not have the {{ }} wrapping + if ( + key in KEYWORDS_WITH_IMPLICIT_TEMPLATE + and lintable + and lintable.kind + in ( + "playbook", + "task", + ) + ): + implicit = True + text = cook(text, implicit=implicit) + + expr_str = None + expr_type = None + verb_skipped = True + lineno = 1 + try: + for token in self.lex(text): + if ( + expr_type + and expr_type.startswith("{%") + and token.token_type in ("name", "whitespace") + and not verb_skipped + ): + # on {% blocks we do not take first word as part of the expression + tokens.append(token) + if token.token_type != "whitespace": + verb_skipped = True + elif token.token_type in begin_types: + tokens.append(token) + expr_type = token.value # such {#, {{, {% + expr_str = "" + verb_skipped = False + elif token.token_type in end_types and expr_str is not None: + # process expression + # pylint: disable=unsupported-membership-test + if isinstance(expr_str, str) and "\n" in expr_str: + raise NotImplementedError() + leading_spaces = " " * (len(expr_str) - len(expr_str.lstrip())) + expr_str = leading_spaces + blacken(expr_str.lstrip()) + if tokens[ + -1 + ].token_type != "whitespace" and not expr_str.startswith(" "): + expr_str = " " + expr_str + if not expr_str.endswith(" "): + expr_str += " " + tokens.append(Token(lineno, "data", expr_str)) + tokens.append(token) + expr_str = None + expr_type = None + elif expr_str is not None: + expr_str += token.value + else: + tokens.append(token) + lineno = token.lineno + + except jinja2.exceptions.TemplateSyntaxError as exc: + return "", str(exc.message), "invalid" + # https://github.com/PyCQA/pylint/issues/7433 - py311 only + # pylint: disable=c-extension-no-member + except (NotImplementedError, black.parsing.InvalidInput) as exc: + # black is not able to recognize all valid jinja2 templates, so we + # just ignore 
InvalidInput errors. + # NotImplementedError is raised internally for expressions with + # newlines, as we decided to not touch them yet. + # These both are documented as known limitations. + _logger.debug("Ignored jinja internal error %s", exc) + return uncook(text, implicit), "", "spacing" + + # finalize + reformatted = self.unlex(tokens) + failed = reformatted != text + reformatted = uncook(reformatted, implicit) + details = ( + f"Jinja2 template rewrite recommendation: `{reformatted}`." + if failed + else "" + ) + return reformatted, details, "spacing" + + +def blacken(text: str) -> str: + """Format Jinja2 template using black.""" + return black.format_str( + text, mode=black.FileMode(line_length=sys.maxsize, string_normalization=False) + ).rstrip("\n") + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.fixture(name="error_expected_lines") + def fixture_error_expected_lines() -> list[int]: + """Return list of expected error lines.""" + return [33, 36, 39, 42, 45, 48, 74] + + # 21 68 + @pytest.fixture(name="lint_error_lines") + def fixture_lint_error_lines() -> list[int]: + """Get VarHasSpacesRules linting results on test_playbook.""" + collection = RulesCollection() + collection.register(JinjaRule()) + lintable = Lintable("examples/playbooks/jinja-spacing.yml") + results = Runner(lintable, rules=collection).run() + return list(map(lambda item: item.linenumber, results)) + + def test_jinja_spacing_playbook( + error_expected_lines: list[int], lint_error_lines: list[int] + ) -> None: + """Ensure that expected error lines are matching found linting error lines.""" + # list unexpected error lines or non-matching error lines + error_lines_difference = list( + set(error_expected_lines).symmetric_difference(set(lint_error_lines)) + ) + assert len(error_lines_difference) == 0 + + def test_jinja_spacing_vars() -> None: + """Ensure that expected error details are matching found linting error details.""" + collection = RulesCollection() + collection.register(JinjaRule()) + lintable = Lintable("examples/playbooks/vars/jinja-spacing.yml") + results = Runner(lintable, rules=collection).run() + + error_expected_lineno = [14, 15, 16, 17, 18, 19, 32] + assert len(results) == len(error_expected_lineno) + for idx, err in enumerate(results): + assert err.linenumber == error_expected_lineno[idx] + + @pytest.mark.parametrize( + ("text", "expected", "tag"), + ( + pytest.param( + "{{-x}}{#a#}{%1%}", + "{{- x }}{# a #}{% 1 %}", + "spacing", + id="add-missing-space", + ), + pytest.param("", "", "spacing", id="1"), + pytest.param("foo", "foo", "spacing", id="2"), + pytest.param("{##}", "{# #}", "spacing", id="3"), + # we want to keep leading spaces as they might be needed for complex multiline jinja files + pytest.param("{# #}", "{# #}", "spacing", id="4"), + pytest.param( + "{{-aaa|xx }}foo\nbar{#some#}\n{%%}", + "{{- aaa | xx }}foo\nbar{# some #}\n{% %}", + "spacing", + id="5", + ), + pytest.param( + "Shell with jinja filter", "Shell with jinja filter", "spacing", id="6" + ), + pytest.param( + "{{{'dummy_2':1}|true}}", + "{{ {'dummy_2': 1} | true }}", + "spacing", + id="7", + ), + pytest.param("{{{foo:{}}}}", "{{ {foo: {}} }}", "spacing", id="8"), + pytest.param( + "{{ {'test': {'subtest': variable}} }}", + "{{ {'test': {'subtest': variable}} }}", + "spacing", + id="9", + ), + pytest.param( + "http://foo.com/{{\n case1 }}", + 
"http://foo.com/{{\n case1 }}", + "spacing", + id="10", + ), + pytest.param("{{foo(123)}}", "{{ foo(123) }}", "spacing", id="11"), + pytest.param("{{ foo(a.b.c) }}", "{{ foo(a.b.c) }}", "spacing", id="12"), + # pytest.param( + # "{{ foo | bool else [ ] }}", + # "{{ foo | bool else [] }}", + # "spacing", + # id="13", + # ), + pytest.param( + "{{foo(x =['server_options'])}}", + "{{ foo(x=['server_options']) }}", + "spacing", + id="14", + ), + pytest.param( + '{{ [ "host", "NA"] }}', '{{ ["host", "NA"] }}', "spacing", id="15" + ), + pytest.param( + "{{ {'dummy_2': {'nested_dummy_1': value_1,\n 'nested_dummy_2': value_2}} |\ncombine(dummy_1) }}", + "{{ {'dummy_2': {'nested_dummy_1': value_1,\n 'nested_dummy_2': value_2}} |\ncombine(dummy_1) }}", + "spacing", + id="17", + ), + pytest.param("{{ & }}", "", "invalid", id="18"), + pytest.param( + "{{ good_format }}/\n{{- good_format }}\n{{- good_format -}}\n", + "{{ good_format }}/\n{{- good_format }}\n{{- good_format -}}\n", + "spacing", + id="19", + ), + pytest.param( + "{{ {'a': {'b': 'x', 'c': y}} }}", + "{{ {'a': {'b': 'x', 'c': y}} }}", + "spacing", + id="20", + ), + pytest.param( + "2*(1+(3-1)) is {{ 2 * {{ 1 + {{ 3 - 1 }}}} }}", + "2*(1+(3-1)) is {{ 2 * {{1 + {{3 - 1}}}} }}", + "spacing", + id="21", + ), + pytest.param( + '{{ "absent"\nif (v is version("2.8.0", ">=")\nelse "present" }}', + "", + "invalid", + id="22", + ), + pytest.param( + '{{lookup("x",y+"/foo/"+z+".txt")}}', + '{{ lookup("x", y + "/foo/" + z + ".txt") }}', + "spacing", + id="23", + ), + pytest.param( + "{{ x | map(attribute='value') }}", + "{{ x | map(attribute='value') }}", + "spacing", + id="24", + ), + pytest.param( + "{{ r(a= 1,b= True,c= 0.0,d= '') }}", + "{{ r(a=1, b=True, c=0.0, d='') }}", + "spacing", + id="25", + ), + pytest.param("{{ r(1,[]) }}", "{{ r(1, []) }}", "spacing", id="26"), + pytest.param( + "{{ lookup([ddd ]) }}", "{{ lookup([ddd]) }}", "spacing", id="27" + ), + pytest.param( + "{{ [ x ] if x is string else x }}", + "{{ [x] if x is string else x }}", + "spacing", + id="28", + ), + pytest.param( + # "{% if a|int <= 8 -%} iptables {%- else -%} iptables-nft {%- endif %}", + "{% if a|int <= 8 -%} iptables {%- else -%} iptables-nft {%- endif %}", + "{% if a | int <= 8 -%} iptables{%- else -%} iptables-nft{%- endif %}", + "spacing", + id="29", + ), + pytest.param( + # "- 2" -> "-2", minus does not get separated when there is no left side + "{{ - 2 }}", + "{{ -2 }}", + "spacing", + id="30", + ), + pytest.param( + # "-2" -> "-2", minus does get an undesired spacing + "{{ -2 }}", + "{{ -2 }}", + "spacing", + id="31", + ), + pytest.param( + # array ranges do not have space added + "{{ foo[2:4] }}", + "{{ foo[2:4] }}", + "spacing", + id="32", + ), + pytest.param( + # array ranges have the extra space removed + "{{ foo[2: 4] }}", + "{{ foo[2:4] }}", + "spacing", + id="33", + ), + pytest.param( + # negative array index + "{{ foo[-1] }}", + "{{ foo[-1] }}", + "spacing", + id="34", + ), + pytest.param( + # negative array index, repair + "{{ foo[- 1] }}", + "{{ foo[-1] }}", + "spacing", + id="35", + ), + pytest.param("{{ a +~'b' }}", "{{ a + ~'b' }}", "spacing", id="36"), + pytest.param( + "{{ (a[: -4] *~ b) }}", "{{ (a[:-4] * ~b) }}", "spacing", id="37" + ), + pytest.param("{{ [a,~ b] }}", "{{ [a, ~b] }}", "spacing", id="38"), + # Not supported yet due to being accepted by black: + pytest.param("{{ item.0.user }}", "{{ item.0.user }}", "spacing", id="39"), + # Not supported by back, while jinja allows ~ to be binary operator: + pytest.param("{{ a ~ b }}", "{{ a ~ b 
}}", "spacing", id="40"), + pytest.param( + "--format='{{'{{'}}.Size{{'}}'}}'", + "--format='{{ '{{' }}.Size{{ '}}' }}'", + "spacing", + id="41", + ), + pytest.param( + "{{ list_one + {{ list_two | max }} }}", + "{{ list_one + {{list_two | max}} }}", + "spacing", + id="42", + ), + pytest.param( + "{{ lookup('file' , '/tmp/non-existent', errors='ignore') }}", + "{{ lookup('file', '/tmp/non-existent', errors='ignore') }}", + "spacing", + id="43", + ), + ), + ) + def test_jinja(text: str, expected: str, tag: str) -> None: + """Tests our ability to spot spacing errors inside jinja2 templates.""" + rule = JinjaRule() + + reformatted, details, returned_tag = rule.check_whitespace( + text, key="name", lintable=Lintable("playbook.yml") + ) + assert tag == returned_tag, details + assert expected == reformatted + + @pytest.mark.parametrize( + ("text", "expected", "tag"), + ( + pytest.param( + "1+2", + "1 + 2", + "spacing", + id="0", + ), + pytest.param( + "- 1", + "-1", + "spacing", + id="1", + ), + # Ensure that we do not choke with double templating on implicit + # and instead we remove them braces. + pytest.param("{{ o | bool }}", "o | bool", "spacing", id="2"), + ), + ) + def test_jinja_implicit(text: str, expected: str, tag: str) -> None: + """Tests our ability to spot spacing errors implicit jinja2 templates.""" + rule = JinjaRule() + # implicit jinja2 are working only inside playbooks and tasks + lintable = Lintable(name="playbook.yml", kind="playbook") + reformatted, details, returned_tag = rule.check_whitespace( + text, key="when", lintable=lintable + ) + assert tag == returned_tag, details + assert expected == reformatted + + @pytest.mark.parametrize( + ("lintable", "matches"), + (pytest.param("examples/playbooks/vars/rule_jinja_vars.yml", 0, id="0"),), + ) + def test_jinja_file(lintable: str, matches: int) -> None: + """Tests our ability to process var filesspot spacing errors.""" + collection = RulesCollection() + collection.register(JinjaRule()) + errs = Runner(lintable, rules=collection).run() + assert len(errs) == matches + for err in errs: + assert isinstance(err, JinjaRule) + assert errs[0].tag == "jinja[invalid]" + assert errs[0].rule.id == "jinja" + + def test_jinja_invalid() -> None: + """Tests our ability to spot spacing errors inside jinja2 templates.""" + collection = RulesCollection() + collection.register(JinjaRule()) + success = "examples/playbooks/rule-jinja-invalid.yml" + errs = Runner(success, rules=collection).run() + assert len(errs) == 2 + assert errs[0].tag == "jinja[spacing]" + assert errs[0].rule.id == "jinja" + assert errs[0].linenumber == 9 + assert errs[1].tag == "jinja[invalid]" + assert errs[1].rule.id == "jinja" + assert errs[1].linenumber == 9 + + def test_jinja_valid() -> None: + """Tests our ability to parse jinja, even when variables may not be defined.""" + collection = RulesCollection() + collection.register(JinjaRule()) + success = "examples/playbooks/rule-jinja-valid.yml" + errs = Runner(success, rules=collection).run() + assert len(errs) == 0 + + +def _get_error_line(task: dict[str, Any], path: list[str | int]) -> int: + """Return error line number.""" + line = task[LINE_NUMBER_KEY] + ctx = task + for _ in path: + ctx = ctx[_] + if LINE_NUMBER_KEY in ctx: + line = ctx[LINE_NUMBER_KEY] + if not isinstance(line, int): + raise RuntimeError("Line number is not an integer") + return line diff --git a/src/ansiblelint/rules/key_order.md b/src/ansiblelint/rules/key_order.md new file mode 100644 index 0000000..378d8a5 --- /dev/null +++ 
b/src/ansiblelint/rules/key_order.md
@@ -0,0 +1,63 @@
+# key-order
+
+This rule recommends reordering key names in ansible content to make
+code easier to maintain and less prone to errors.
+
+Here are some examples of common ordering checks done for tasks and handlers:
+
+- `name` must always be the first key for plays, tasks and handlers
+- on tasks, the `block`, `rescue` and `always` keys must be the last keys,
+  as this avoids accidental mis-indentation errors between the last task
+  and the parent level.
+
+## Problematic code
+
+```yaml
+---
+- hosts: localhost
+  name: This is a playbook # <-- name key should be the first one
+  tasks:
+    - name: A block
+      block:
+        - name: Display a message
+          debug:
+            msg: "Hello world!"
+      when: true # <-- when key should be before block
+```
+
+## Correct code
+
+```yaml
+---
+- name: This is a playbook
+  hosts: localhost
+  tasks:
+    - name: A block
+      when: true
+      block:
+        - name: Display a message
+          debug:
+            msg: "Hello world!"
+```
+
+## Reasoning
+
+Making decisions about the optimal order of keys for ansible tasks or plays is
+no easy task, as there are a huge number of combinations to consider. This is
+also the reason why we started with a minimal sorting rule (name must be first)
+and aim to gradually add more fields later, but only when we find proof that
+one approach is likely better than the other.
+
+### Why can I no longer put `when` after a `block`?
+
+Try to remember that in real life, `block/rescue/always` sections tend to grow
+with the number of tasks they host, making them exceed what fits on a single
+screen. This moves the `when` key further away from the rest of the task's
+properties. A `when` belonging to the last task inside the block can easily be
+confused as being at the block level, or the reverse. When tasks are moved from
+one location to another, there is a real risk of moving the block-level `when`
+along with them.
+
+By putting the `when` before the `block`, we avoid that kind of risk. The same
+risk applies to any simple property at the task level, which is why we
+concluded that the block keys must be the last ones.
+
+Another common practice was to put `tags` as the last property. Still, for the
+same reasons, we decided that they should not be put after the block keys
+either.
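+
+The check behind this rule is essentially a comparator-based sort of the task
+keys: `name` ranks first, the block keys rank last, and every other key falls
+into a wildcard slot in between. A minimal illustrative sketch of that idea
+(simplified, not the rule's exact code; names are illustrative):
+
+```python
+import functools
+
+# Simplified sketch of the key-order idea; not the rule's actual implementation.
+# Desired order: name first, block/rescue/always last, anything else in between.
+DESIRED_ORDER = ("name", None, "block", "rescue", "always")  # None = wildcard slot
+
+
+def rank(key: str) -> int:
+    """Return the sort rank of a task key; unknown keys share the wildcard slot."""
+    return DESIRED_ORDER.index(key) if key in DESIRED_ORDER else DESIRED_ORDER.index(None)
+
+
+def compare(key1: str, key2: str) -> int:
+    """Three-way comparator usable with functools.cmp_to_key."""
+    return (rank(key1) > rank(key2)) - (rank(key1) < rank(key2))
+
+
+print(sorted(["when", "block", "name"], key=functools.cmp_to_key(compare)))
+# -> ['name', 'when', 'block']
+```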
diff --git a/src/ansiblelint/rules/key_order.py b/src/ansiblelint/rules/key_order.py new file mode 100644 index 0000000..5de637c --- /dev/null +++ b/src/ansiblelint/rules/key_order.py @@ -0,0 +1,155 @@ +"""All tasks should be have name come first.""" +from __future__ import annotations + +import functools +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.testing import RunFromText + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + + +SORTER_TASKS = ( + "name", + # "__module__", + # "action", + # "args", + None, # <-- None include all modules that not using action and * + # "when", + # "(loop|loop_|with_).*", + # "notify", + # "tags", + "block", + "rescue", + "always", +) + + +def get_property_sort_index(name: str) -> int: + """Return the index of the property in the sorter.""" + a_index = -1 + for i, v in enumerate(SORTER_TASKS): + if v == name: + return i + if v is None: + a_index = i + return a_index + + +def task_property_sorter(property1: str, property2: str) -> int: + """Sort task properties based on SORTER.""" + v_1 = get_property_sort_index(property1) + v_2 = get_property_sort_index(property2) + return (v_1 > v_2) - (v_1 < v_2) + + +class KeyOrderRule(AnsibleLintRule): + """Ensure specific order of keys in mappings.""" + + id = "key-order" + shortdesc = __doc__ + severity = "LOW" + tags = ["formatting"] + version_added = "v6.6.2" + needs_raw_task = True + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + result = [] + raw_task = task["__raw_task__"] + keys = [key for key in raw_task.keys() if not key.startswith("_")] + sorted_keys = sorted(keys, key=functools.cmp_to_key(task_property_sorter)) + if keys != sorted_keys: + result.append( + self.create_matcherror( + f"You can improve the task key order to: {', '.join(sorted_keys)}", + filename=file, + tag="key-order[task]", + ) + ) + return result + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + PLAY_SUCCESS = """--- +- hosts: localhost + tasks: + - name: Test + command: echo "test" + - name: Test2 + debug: + msg: "Debug without a name" + - name: Flush handlers + meta: flush_handlers + - no_log: true # noqa: key-order + shell: echo hello + name: Task with no_log on top +""" + + @pytest.mark.parametrize("rule_runner", (KeyOrderRule,), indirect=["rule_runner"]) + def test_key_order_task_name_has_name_first_rule_pass( + rule_runner: RunFromText, + ) -> None: + """Test rule matches.""" + results = rule_runner.run_playbook(PLAY_SUCCESS) + assert len(results) == 0 + + @pytest.mark.parametrize("rule_runner", (KeyOrderRule,), indirect=["rule_runner"]) + def test_key_order_task_name_has_name_first_rule_fail( + rule_runner: RunFromText, + ) -> None: + """Test rule matches.""" + results = rule_runner.run("examples/playbooks/rule-key-order-fail.yml") + assert len(results) == 6 + + @pytest.mark.parametrize( + ("properties", "expected"), + ( + pytest.param([], []), + pytest.param(["block", "name"], ["name", "block"]), + pytest.param( + ["block", "name", "action", "..."], ["name", "action", "...", "block"] + ), + ), + ) + def test_key_order_property_sorter( + properties: list[str], expected: list[str] + ) -> None: + """Test the task property sorter.""" + result = sorted(properties, key=functools.cmp_to_key(task_property_sorter)) + assert expected == result + + @pytest.mark.parametrize( + 
("key", "order"), + ( + pytest.param("name", 0), + pytest.param("action", 1), + pytest.param("foobar", SORTER_TASKS.index(None)), + pytest.param("block", len(SORTER_TASKS) - 3), + pytest.param("rescue", len(SORTER_TASKS) - 2), + pytest.param("always", len(SORTER_TASKS) - 1), + ), + ) + def test_key_order_property_sort_index(key: str, order: int) -> None: + """Test sorting index.""" + assert get_property_sort_index(key) == order + + @pytest.mark.parametrize( + ("prop1", "prop2", "result"), + ( + pytest.param("name", "block", -1), + pytest.param("block", "name", 1), + pytest.param("block", "block", 0), + ), + ) + def test_key_order_property_sortfunc(prop1: str, prop2: str, result: int) -> None: + """Test sorting function.""" + assert task_property_sorter(prop1, prop2) == result diff --git a/src/ansiblelint/rules/latest.md b/src/ansiblelint/rules/latest.md new file mode 100644 index 0000000..1b20432 --- /dev/null +++ b/src/ansiblelint/rules/latest.md @@ -0,0 +1,43 @@ +# latest + +The `latest` rule checks that module arguments like those used for source +control checkout do not have arguments that might generate different results +based on context. + +This more generic rule replaced two older rules named `git-latest` and +`hg-latest`. + +We are aware that there are genuine cases where getting the tip of the main +branch is not accidental. For these cases, just add a comment such as +`# noqa: latest` to the same line to prevent it from triggering. + +## Possible errors messages: + +- `latest[git]` +- `latest[hg]` + +## Problematic code + +```yaml +--- +- name: Example for `latest` rule + hosts: localhost + tasks: + - name: Risky use of git module + ansible.builtin.git: + repo: "https://github.com/ansible/ansible-lint" + version: HEAD # <-- HEAD value is triggering the rule +``` + +## Correct code + +```yaml +--- +- name: Example for `latest` rule + hosts: localhost + tasks: + - name: Safe use of git module + ansible.builtin.git: + repo: "https://github.com/ansible/ansible-lint" + version: abcd1234... 
# <-- that is safe +``` diff --git a/src/ansiblelint/rules/latest.py b/src/ansiblelint/rules/latest.py new file mode 100644 index 0000000..a21fdf5 --- /dev/null +++ b/src/ansiblelint/rules/latest.py @@ -0,0 +1,39 @@ +"""Implementation of latest rule.""" +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class LatestRule(AnsibleLintRule): + """Result of the command may vary on subsequent runs.""" + + id = "latest" + description = ( + "All version control checkouts must point to " + "an explicit commit or tag, not just ``latest``" + ) + severity = "MEDIUM" + tags = ["idempotency"] + version_added = "v6.5.2" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str | MatchError: + """Check if module args are safe.""" + if ( + task["action"]["__ansible_module__"] == "git" + and task["action"].get("version", "HEAD") == "HEAD" + ): + return self.create_matcherror(tag="latest[git]", filename=file) + if ( + task["action"]["__ansible_module__"] == "hg" + and task["action"].get("revision", "default") == "default" + ): + return self.create_matcherror(tag="latest[hg]", filename=file) + return False diff --git a/src/ansiblelint/rules/literal_compare.md b/src/ansiblelint/rules/literal_compare.md new file mode 100644 index 0000000..606402c --- /dev/null +++ b/src/ansiblelint/rules/literal_compare.md @@ -0,0 +1,29 @@ +# literal-compare + +This rule checks for literal comparison with the `when` clause. +Literal comparison, like `when: var == True`, is unnecessarily complex. +Use `when: var` to keep your playbooks simple. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Print environment variable to stdout + ansible.builtin.command: echo $MY_ENV_VAR + when: ansible_os_family == True # <- Adds complexity to your playbook. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Print environment variable to stdout + ansible.builtin.command: echo $MY_ENV_VAR + when: ansible_os_family # <- Keeps your playbook simple. 
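+    # The converse also applies: prefer `when: not var` over `when: var == False`.
+    - name: Print environment variable to stdout (negated check)
+      ansible.builtin.command: echo $MY_ENV_VAR
+      when: not ansible_os_family # <- Simpler than comparing to a literal False.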
+``` diff --git a/src/ansiblelint/rules/literal_compare.py b/src/ansiblelint/rules/literal_compare.py new file mode 100644 index 0000000..e81be15 --- /dev/null +++ b/src/ansiblelint/rules/literal_compare.py @@ -0,0 +1,48 @@ +"""Implementation of the literal-compare rule.""" +# Copyright (c) 2016, Will Thames and contributors +# Copyright (c) 2018-2021, Ansible Project + +from __future__ import annotations + +import re +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.yaml_utils import nested_items_path + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class ComparisonToLiteralBoolRule(AnsibleLintRule): + """Don't compare to literal True/False.""" + + id = "literal-compare" + description = ( + "Use ``when: var`` rather than ``when: var == True`` " + "(or conversely ``when: not var``)" + ) + severity = "HIGH" + tags = ["idiom"] + version_added = "v4.0.0" + + literal_bool_compare = re.compile("[=!]= ?(True|true|False|false)") + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + for k, v, _ in nested_items_path(task): + if k == "when": + if isinstance(v, str): + if self.literal_bool_compare.search(v): + return True + elif isinstance(v, bool): + pass + else: + for item in v: + if isinstance(item, str) and self.literal_bool_compare.search( + item + ): + return True + + return False diff --git a/src/ansiblelint/rules/loop_var_prefix.md b/src/ansiblelint/rules/loop_var_prefix.md new file mode 100644 index 0000000..33adbd7 --- /dev/null +++ b/src/ansiblelint/rules/loop_var_prefix.md @@ -0,0 +1,78 @@ +# loop-var-prefix + +This rule avoids conflicts with nested looping tasks by configuring a variable +prefix with `loop_var`. Ansible sets `item` as the loop variable. You can use +`loop_var` to specify a prefix for loop variables and ensure they are unique to +each task. + +This rule can produce the following messages: + +- `loop-var-prefix[missing]` - Replace any unsafe implicit `item` loop variable + by adding `loop_var: <loop_var_prefix>...`. +- `loop-var-prefix[wrong]` - Ensure loop variables start with + `<loop_var_prefix>`. + +This rule originates from the [Naming parameters section of Ansible Best +Practices guide][cop314]. + +## Settings + +You can change the behavior of this rule by overriding its default regular +expression used to check loop variable naming. Keep in mind that the `{role}` +part is replaced with the inferred role name when applicable. + +```yaml +# .ansible-lint +loop_var_prefix: "^(__|{role}_)" +``` + +This is an opt-in rule. You must enable it in your Ansible-lint configuration as +follows: + +```yaml +enable_list: + - loop-var-prefix +``` + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Does not set a prefix for loop variables. + ansible.builtin.debug: + var: item + loop: + - foo + - bar # <- These items do not have a unique prefix. + - name: Sets a prefix that is not unique. + ansible.builtin.debug: + var: zz_item + loop: + - foo + - bar + loop_control: + loop_var: zz_item # <- This prefix is not unique. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Sets a unique prefix for loop variables. + ansible.builtin.debug: + var: zz_item + loop: + - foo + - bar + loop_control: + loop_var: my_prefix # <- Specifies a unique prefix for loop variables. 
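+        # With loop_var set, tasks reference the loop variable by that name,
+        # e.g. "{{ my_prefix }}" instead of the default "{{ item }}".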
+``` + +[cop314]: + https://redhat-cop.github.io/automation-good-practices/#_naming_parameters diff --git a/src/ansiblelint/rules/loop_var_prefix.py b/src/ansiblelint/rules/loop_var_prefix.py new file mode 100644 index 0000000..cc909a3 --- /dev/null +++ b/src/ansiblelint/rules/loop_var_prefix.py @@ -0,0 +1,100 @@ +"""Optional Ansible-lint rule to enforce use of prefix on role loop vars.""" +from __future__ import annotations + +import re +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.config import LOOP_VAR_PREFIX, options +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.text import toidentifier + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class RoleLoopVarPrefix(AnsibleLintRule): + """Role loop_var should use configured prefix.""" + + id = "loop-var-prefix" + link = ( + "https://docs.ansible.com/ansible/latest/playbook_guide/" + "playbooks_loops.html#defining-inner-and-outer-variable-names-with-loop-var" + ) + description = """\ +Looping inside roles has the risk of clashing with loops from user-playbooks.\ +""" + + tags = ["idiom"] + prefix = re.compile("") + severity = "MEDIUM" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + """Return matches for a task.""" + if not file or not file.role or not options.loop_var_prefix: + return [] + + self.prefix = re.compile( + options.loop_var_prefix.format(role=toidentifier(file.role)) + ) + has_loop = "loop" in task + for key in task.keys(): + if key.startswith("with_"): + has_loop = True + + if has_loop: + loop_control = task.get("loop_control", {}) + loop_var = loop_control.get("loop_var", "") + + if loop_var: + if not self.prefix.match(loop_var): + return [ + self.create_matcherror( + message=f"Loop variable name does not match /{options.loop_var_prefix}/ regex, where role={toidentifier(file.role)}.", + filename=file, + tag="loop-var-prefix[wrong]", + ) + ] + else: + return [ + self.create_matcherror( + message=f"Replace unsafe implicit `item` loop variable by adding a `loop_var` that is matching /{options.loop_var_prefix}/ regex.", + filename=file, + tag="loop-var-prefix[missing]", + ) + ] + + return [] + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("test_file", "failures"), + ( + pytest.param( + "examples/playbooks/roles/loop_var_prefix/tasks/pass.yml", 0, id="pass" + ), + pytest.param( + "examples/playbooks/roles/loop_var_prefix/tasks/fail.yml", 5, id="fail" + ), + ), + ) + def test_loop_var_prefix( + default_rules_collection: RulesCollection, test_file: str, failures: int + ) -> None: + """Test rule matches.""" + # Enable checking of loop variable prefixes in roles + options.loop_var_prefix = LOOP_VAR_PREFIX + results = Runner(test_file, rules=default_rules_collection).run() + for result in results: + assert result.rule.id == RoleLoopVarPrefix().id + assert len(results) == failures diff --git a/src/ansiblelint/rules/meta_incorrect.md b/src/ansiblelint/rules/meta_incorrect.md new file mode 100644 index 0000000..b1e8793 --- /dev/null +++ b/src/ansiblelint/rules/meta_incorrect.md @@ -0,0 +1,32 @@ +# meta-incorrect + +This rule checks role metadata for fields with undefined or default values. 
+Always set appropriate values for the following metadata fields in the `meta/main.yml` file: + +- `author` +- `description` +- `company` +- `license` + +## Problematic Code + +```yaml +--- +# Metadata fields for the role contain default values. +galaxy_info: + author: your name + description: your role description + company: your company (optional) + license: license (GPL-2.0-or-later, MIT, etc) +``` + +## Correct Code + +```yaml +--- +galaxy_info: + author: Leroy Jenkins + description: This role will set you free. + company: Red Hat + license: Apache +``` diff --git a/src/ansiblelint/rules/meta_incorrect.py b/src/ansiblelint/rules/meta_incorrect.py new file mode 100644 index 0000000..160b8ec --- /dev/null +++ b/src/ansiblelint/rules/meta_incorrect.py @@ -0,0 +1,56 @@ +"""Implementation of meta-incorrect rule.""" +# Copyright (c) 2018, Ansible Project +from __future__ import annotations + +from typing import TYPE_CHECKING + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from typing import Any + + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +class MetaChangeFromDefaultRule(AnsibleLintRule): + """meta/main.yml default values should be changed.""" + + id = "meta-incorrect" + field_defaults = [ + ("author", "your name"), + ("description", "your description"), + ("company", "your company (optional)"), + ("license", "license (GPLv2, CC-BY, etc)"), + ("license", "license (GPL-2.0-or-later, MIT, etc)"), + ] + values = ", ".join(sorted({f[0] for f in field_defaults})) + description = ( + f"You should set appropriate values in meta/main.yml for these fields: {values}" + ) + severity = "HIGH" + tags = ["metadata"] + version_added = "v4.0.0" + + def matchyaml(self, file: Lintable) -> list[MatchError]: + if file.kind != "meta" or not file.data: + return [] + + galaxy_info = file.data.get("galaxy_info", None) + if not galaxy_info: + return [] + + results = [] + for field, default in self.field_defaults: + value = galaxy_info.get(field, None) + if value and value == default: + results.append( + self.create_matcherror( + filename=file, + linenumber=file.data[LINE_NUMBER_KEY], + message=f"Should change default metadata: {field}", + ) + ) + + return results diff --git a/src/ansiblelint/rules/meta_no_info.md b/src/ansiblelint/rules/meta_no_info.md new file mode 100644 index 0000000..7617cd2 --- /dev/null +++ b/src/ansiblelint/rules/meta_no_info.md @@ -0,0 +1,28 @@ +# meta-no-info + +This rule checks role metadata for missing information. +Always set appropriate values for the following metadata fields in the `meta/main.yml` file, under `galaxy_info` key: + +- `platforms` +- `min_ansible_version` + +## Problematic Code + +```yaml +--- +# The metadata fields for minimum Ansible version and supported platforms are not set. 
+galaxy_info: + min_ansible_version: +``` + +## Correct Code + +```yaml +--- +galaxy_info: + min_ansible_version: "2.8" + platforms: + - name: Fedora + versions: + - all +``` diff --git a/src/ansiblelint/rules/meta_no_info.py b/src/ansiblelint/rules/meta_no_info.py new file mode 100644 index 0000000..0e3c046 --- /dev/null +++ b/src/ansiblelint/rules/meta_no_info.py @@ -0,0 +1,83 @@ +"""Implementation of meta-no-info rule.""" +# Copyright (c) 2016, Will Thames and contributors +# Copyright (c) 2018, Ansible Project +from __future__ import annotations + +from typing import TYPE_CHECKING, Generator + +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from typing import Any, Tuple + + +META_STR_INFO = ("author", "description") +META_INFO = tuple( + list(META_STR_INFO) + + [ + "license", + "min_ansible_version", + "platforms", + ] +) + + +def _platform_info_errors_itr( + platforms: list[dict[str, str]], +) -> Generator[str, None, None]: + if not isinstance(platforms, list): + yield "Platforms should be a list of dictionaries" + return + + for platform in platforms: + if not isinstance(platform, dict): + yield "Platforms should be a list of dictionaries" + elif "name" not in platform: + yield "Platform should contain name" + + +def _galaxy_info_errors_itr( + galaxy_info: dict[str, Any], + info_list: tuple[str, ...] = META_INFO, + str_info_list: tuple[str, ...] = META_STR_INFO, +) -> Generator[str, None, None]: + for info in info_list: + g_info = galaxy_info.get(info, False) + if g_info: + if info in str_info_list and not isinstance(g_info, str): + yield f"{info} should be a string" + elif info == "platforms": + yield from _platform_info_errors_itr(g_info) + else: + yield f"Role info should contain {info}" + + +class MetaMainHasInfoRule(AnsibleLintRule): + """meta/main.yml should contain relevant info.""" + + id = "meta-no-info" + str_info = META_STR_INFO + info = META_INFO + description = f"meta/main.yml should contain: {', '.join(info)}" + severity = "HIGH" + tags = ["metadata"] + version_added = "v4.0.0" + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + if file.kind != "meta": + return [] + + # since Ansible 2.10 we can add a meta/requirements.yml but + # we only want to match on meta/main.yml + if file.path.name != "main.yml": + return [] + + galaxy_info = data.get("galaxy_info", False) + if galaxy_info: + return [ + self.create_matcherror(message=err, filename=file) + for err in _galaxy_info_errors_itr(galaxy_info) + ] + return [self.create_matcherror(message="No 'galaxy_info' found", filename=file)] diff --git a/src/ansiblelint/rules/meta_no_tags.md b/src/ansiblelint/rules/meta_no_tags.md new file mode 100644 index 0000000..9518549 --- /dev/null +++ b/src/ansiblelint/rules/meta_no_tags.md @@ -0,0 +1,22 @@ +# meta-no-tags + +This rule checks role metadata for tags with special characters. +Always use lowercase numbers and letters for tags in the `meta/main.yml` file. + +## Problematic Code + +```yaml +--- +# Metadata tags contain upper-case letters and special characters. +galaxy_info: + galaxy_tags: [MyTag#1, MyTag&^-] +``` + +## Correct Code + +```yaml +--- +# Metadata tags contain only lowercase letters and numbers. 
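+# Each tag must match the pattern ^[a-z0-9]+$.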
+galaxy_info: + galaxy_tags: [mytag1, mytag2] +``` diff --git a/src/ansiblelint/rules/meta_no_tags.py b/src/ansiblelint/rules/meta_no_tags.py new file mode 100644 index 0000000..4fab461 --- /dev/null +++ b/src/ansiblelint/rules/meta_no_tags.py @@ -0,0 +1,163 @@ +"""Implementation of meta-no-tags rule.""" +from __future__ import annotations + +import re +import sys +from typing import TYPE_CHECKING + +from ansiblelint.rules import AnsibleLintRule + +# Copyright (c) 2018, Ansible Project + + +if TYPE_CHECKING: + from typing import Any + + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +class MetaTagValidRule(AnsibleLintRule): + """Tags must contain lowercase letters and digits only.""" + + id = "meta-no-tags" + description = ( + "Tags must contain lowercase letters and digits only, " + "and ``galaxy_tags`` is expected to be a list" + ) + severity = "HIGH" + tags = ["metadata"] + version_added = "v4.0.0" + + TAG_REGEXP = re.compile("^[a-z0-9]+$") + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Find violations inside meta files.""" + if file.kind != "meta" or not file.data: + return [] + + galaxy_info = file.data.get("galaxy_info", None) + if not galaxy_info: + return [] + + tags = [] + results = [] + + if "galaxy_tags" in galaxy_info: + if isinstance(galaxy_info["galaxy_tags"], list): + tags += galaxy_info["galaxy_tags"] + else: + results.append( + self.create_matcherror( + "Expected 'galaxy_tags' to be a list", filename=file + ) + ) + + if "categories" in galaxy_info: + results.append( + self.create_matcherror( + "Use 'galaxy_tags' rather than 'categories'", filename=file + ) + ) + if isinstance(galaxy_info["categories"], list): + tags += galaxy_info["categories"] + else: + results.append( + self.create_matcherror( + "Expected 'categories' to be a list", filename=file + ) + ) + + for tag in tags: + msg = self.shortdesc + if not isinstance(tag, str): + results.append( + self.create_matcherror( + f"Tags must be strings: '{tag}'", filename=file + ) + ) + continue + if not re.match(self.TAG_REGEXP, tag): + results.append( + self.create_matcherror( + message=f"{msg}, invalid: '{tag}'", filename=file + ) + ) + + return results + + +META_TAG_VALID = """ +galaxy_info: + galaxy_tags: ['database', 'my s q l', 'MYTAG'] + categories: 'my_category_not_in_a_list' +""" + +META_TAG_NO_GALAXY_INFO = """ +galaxy_tags: ['database', 'my s q l', 'MYTAG'] +""" + +META_TAG_NO_LIST = """ +galaxy_info: + galaxy_tags: 'database' +""" + +META_CATEGORIES_AS_LIST = """ +galaxy_info: + galaxy_tags: ['database', 'my s q l', 'MYTAG'] + categories: ['networking', 'posix'] +""" + +META_TAGS_NOT_A_STRING = """ +galaxy_info: + galaxy_tags: [False, 'database', 'my s q l', 'MYTAG'] + categories: 'networking' +""" + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + @pytest.mark.parametrize( + "rule_runner", (MetaTagValidRule,), indirect=["rule_runner"] + ) + def test_valid_tag_rule(rule_runner: Any) -> None: + """Test rule matches.""" + results = rule_runner.run_role_meta_main(META_TAG_VALID) + assert "Use 'galaxy_tags' rather than 'categories'" in str(results), results + assert "Expected 'categories' to be a list" in str(results) + assert "invalid: 'my s q l'" in str(results) + assert "invalid: 'MYTAG'" in str(results) + + @pytest.mark.parametrize( + "rule_runner", (MetaTagValidRule,), indirect=["rule_runner"] + ) + def test_no_galaxy_info(rule_runner: Any) -> None: + """Test rule matches.""" 
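+        # META_TAG_NO_GALAXY_INFO has no galaxy_info mapping, so matchyaml returns
+        # early and no violations are expected for this fixture.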
+ results = rule_runner.run_role_meta_main(META_TAG_NO_GALAXY_INFO) + assert results == [] + + @pytest.mark.parametrize( + "rule_runner", (MetaTagValidRule,), indirect=["rule_runner"] + ) + def test_no_galaxy_tags_list(rule_runner: Any) -> None: + """Test rule matches.""" + results = rule_runner.run_role_meta_main(META_TAG_NO_LIST) + assert "Expected 'galaxy_tags' to be a list" in str(results) + + @pytest.mark.parametrize( + "rule_runner", (MetaTagValidRule,), indirect=["rule_runner"] + ) + def test_galaxy_categories_as_list(rule_runner: Any) -> None: + """Test rule matches.""" + results = rule_runner.run_role_meta_main(META_CATEGORIES_AS_LIST) + assert "Use 'galaxy_tags' rather than 'categories'" in str(results), results + assert "Expected 'categories' to be a list" not in str(results) + + @pytest.mark.parametrize( + "rule_runner", (MetaTagValidRule,), indirect=["rule_runner"] + ) + def test_tags_not_a_string(rule_runner: Any) -> None: + """Test rule matches.""" + results = rule_runner.run_role_meta_main(META_TAGS_NOT_A_STRING) + assert "Tags must be strings" in str(results) diff --git a/src/ansiblelint/rules/meta_runtime.md b/src/ansiblelint/rules/meta_runtime.md new file mode 100644 index 0000000..5526912 --- /dev/null +++ b/src/ansiblelint/rules/meta_runtime.md @@ -0,0 +1,45 @@ +# meta-runtime + +This rule checks the meta/runtime.yml `requires_ansible` key against the list of currently supported versions of ansible-core. + +This rule can produce messages such: + +- `requires_ansible` key must be set to a supported version. + +Currently supported versions of ansible-core are: + +- `2.9.10` +- `2.11.x` +- `2.12.x` +- `2.13.x` +- `2.14.x` +- `2.15.x` + +This rule can produce messages such as: + +- `meta-runtime[unsupported-version]` - `requires_ansible` key must contain a supported version, shown in the list above. +- `meta-runtime[invalid-version]` - `requires_ansible` key must be a valid version identifier. + + +## Problematic code + +```yaml +# runtime.yml +--- +requires_ansible: ">=2.9" +``` + + +```yaml +# runtime.yml +--- +requires_ansible: "2.9" +``` + +## Correct code + +```yaml +# runtime.yml +--- +requires_ansible: ">=2.9.10" +``` diff --git a/src/ansiblelint/rules/meta_runtime.py b/src/ansiblelint/rules/meta_runtime.py new file mode 100644 index 0000000..2e2fdd5 --- /dev/null +++ b/src/ansiblelint/rules/meta_runtime.py @@ -0,0 +1,125 @@ +"""Implementation of meta-runtime rule.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING + +from packaging.specifiers import SpecifierSet + +from ansiblelint.rules import AnsibleLintRule + +# Copyright (c) 2018, Ansible Project + + +if TYPE_CHECKING: + from typing import Any + + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +class CheckRequiresAnsibleVersion(AnsibleLintRule): + """Required ansible version in meta/runtime.yml must be a supported version.""" + + id = "meta-runtime" + description = ( + "The ``requires_ansible`` key in runtime.yml must specify " + "a supported platform version of ansible-core and be a valid version." + ) + severity = "VERY_HIGH" + tags = ["metadata"] + version_added = "v6.11.0 (last update)" + + # Refer to https://access.redhat.com/support/policy/updates/ansible-automation-platform + # Also add devel to this list + supported_ansible = ["2.9.10", "2.11.", "2.12.", "2.13.", "2.14.", "2.15."] + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Find violations inside meta files. 
+ + :param file: Input lintable file that is a match for `meta-runtime` + :returns: List of errors matched to the input file + """ + results = [] + + if file.kind != "meta-runtime": + return [] + + version_required = file.data.get("requires_ansible", None) + + if version_required: + if not any( + version in version_required for version in self.supported_ansible + ): + results.append( + self.create_matcherror( + message="requires_ansible key must be set to a supported version.", + tag="meta-runtime[unsupported-version]", + filename=file, + ) + ) + + try: + SpecifierSet(version_required) + except ValueError: + results.append( + self.create_matcherror( + message="'requires_ansible' is not a valid requirement specification", + tag="meta-runtime[invalid-version]", + filename=file, + ) + ) + return results + + return [] + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("test_file", "failures", "tags"), + ( + pytest.param( + "examples/meta_runtime_version_checks/pass/meta/runtime.yml", + 0, + "meta-runtime[unsupported-version]", + id="pass", + ), + pytest.param( + "examples/meta_runtime_version_checks/fail_0/meta/runtime.yml", + 1, + "meta-runtime[unsupported-version]", + id="fail0", + ), + pytest.param( + "examples/meta_runtime_version_checks/fail_1/meta/runtime.yml", + 1, + "meta-runtime[unsupported-version]", + id="fail1", + ), + pytest.param( + "examples/meta_runtime_version_checks/fail_2/meta/runtime.yml", + 1, + "meta-runtime[invalid-version]", + id="fail2", + ), + ), + ) + def test_meta_supported_version( + default_rules_collection: RulesCollection, + test_file: str, + failures: int, + tags: str, + ) -> None: + """Test rule matches.""" + default_rules_collection.register(CheckRequiresAnsibleVersion()) + results = Runner(test_file, rules=default_rules_collection).run() + for result in results: + assert result.rule.id == CheckRequiresAnsibleVersion().id + assert result.tag == tags + assert len(results) == failures diff --git a/src/ansiblelint/rules/meta_video_links.md b/src/ansiblelint/rules/meta_video_links.md new file mode 100644 index 0000000..c3f051b --- /dev/null +++ b/src/ansiblelint/rules/meta_video_links.md @@ -0,0 +1,36 @@ +# meta-video-links + +This rule checks formatting for video links in metadata. Always use dictionaries +for items in the `meta/main.yml` file. + +Items in the `video_links` section must be in a dictionary and use the following +keys: + +- `url` +- `title` + +The value of the `url` key must be a shared link from YouTube, Vimeo, or Google +Drive. + +## Problematic Code + +```yaml +--- +galaxy_info: + video_links: + - https://www.youtube.com/watch?v=aWmRepTSFKs&feature=youtu.be # <- Does not use the url key. + - my_bad_key: https://www.youtube.com/watch?v=aWmRepTSFKs&feature=youtu.be # <- Uses an unsupported key. + title: Incorrect key. + - url: www.acme.com/vid # <- Uses an unsupported url format. + title: Incorrect url format. +``` + +## Correct Code + +```yaml +--- +galaxy_info: + video_links: + - url: https://www.youtube.com/watch?v=aWmRepTSFKs&feature=youtu.be # <- Uses a supported shared link with the url key. + title: Correctly formatted video link. 
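+    - url: https://youtu.be/aWmRepTSFKs # <- A youtu.be short link (same video id as above) also matches the URL patterns the rule accepts.
+      title: Another correctly formatted video link.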
+``` diff --git a/src/ansiblelint/rules/meta_video_links.py b/src/ansiblelint/rules/meta_video_links.py new file mode 100644 index 0000000..402844a --- /dev/null +++ b/src/ansiblelint/rules/meta_video_links.py @@ -0,0 +1,80 @@ +"""Implementation of meta-video-links rule.""" +# Copyright (c) 2018, Ansible Project +from __future__ import annotations + +import re +from typing import TYPE_CHECKING + +from ansiblelint.constants import FILENAME_KEY, LINE_NUMBER_KEY +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +class MetaVideoLinksRule(AnsibleLintRule): + """meta/main.yml video_links should be formatted correctly.""" + + id = "meta-video-links" + description = ( + "Items in ``video_links`` in meta/main.yml should be " + "dictionaries, and contain only keys ``url`` and ``title``, " + "and have a shared link from a supported provider" + ) + severity = "LOW" + tags = ["metadata"] + version_added = "v4.0.0" + + VIDEO_REGEXP = { + "google": re.compile(r"https://drive\.google\.com.*file/d/([0-9A-Za-z-_]+)/.*"), + "vimeo": re.compile(r"https://vimeo\.com/([0-9]+)"), + "youtube": re.compile(r"https://youtu\.be/([0-9A-Za-z-_]+)"), + } + + def matchyaml(self, file: Lintable) -> list[MatchError]: + if file.kind != "meta" or not file.data: + return [] + + galaxy_info = file.data.get("galaxy_info", None) + if not galaxy_info: + return [] + + video_links = galaxy_info.get("video_links", None) + if not video_links: + return [] + + results = [] + + for video in video_links: + if not isinstance(video, dict): + results.append( + self.create_matcherror( + "Expected item in 'video_links' to be a dictionary", + filename=file, + ) + ) + continue + + if set(video) != {"url", "title", FILENAME_KEY, LINE_NUMBER_KEY}: + results.append( + self.create_matcherror( + "Expected item in 'video_links' to contain " + "only keys 'url' and 'title'", + filename=file, + ) + ) + continue + + for _, expr in self.VIDEO_REGEXP.items(): + if expr.match(video["url"]): + break + else: + msg = ( + f"URL format '{video['url']}' is not recognized. " + "Expected it be a shared link from Vimeo, YouTube, " + "or Google Drive." + ) + results.append(self.create_matcherror(msg, filename=file)) + + return results diff --git a/src/ansiblelint/rules/name.md b/src/ansiblelint/rules/name.md new file mode 100644 index 0000000..9df4213 --- /dev/null +++ b/src/ansiblelint/rules/name.md @@ -0,0 +1,61 @@ +# name + +This rule identifies several problems related to the naming of tasks and plays. +This is important because these names are the primary way to **identify** and +**document** executed operations on the console, logs or web interface. + +This rule can produce messages as: + +- `name[casing]` - All names should start with an uppercase letter for languages + that support it. +- `name[missing]` - All tasks should be named. +- `name[play]` - All plays should be named. +- `name[prefix]` - Prefix task names in sub-tasks files. (opt-in) +- `name[template]` - Jinja templates should only be at the end of 'name'. This + helps with the identification of tasks inside the source code when they fail. + The use of templating inside `name` keys is discouraged as there are multiple + cases where the rendering of the name template is not possible. + +If you want to ignore some of the messages above, you can add any of them to the +`skip_list`. + +## name[prefix] + +This rule applies only to included task files that are not named `main.yml`. 
It +suggests adding the stem of the file as a prefix to the task name. + +For example, if you have a task named `Restart server` inside a file named +`tasks/deploy.yml`, this rule suggests renaming it to `deploy | Restart server`, +so it would be easier to identify where it comes from. + +For the moment, this sub-rule is just an **opt-in**, so you need to add it to +your `enable_list` to activate it. + +!!! note + + This rule was designed by [Red Hat Community of Practice](https://redhat-cop.github.io/automation-good-practices/#_prefix_task_names_in_sub_tasks_files_of_roles). The reasoning behind it being + that in a complex roles or playbooks with multiple (sub-)tasks file, it becomes + difficult to understand which task belongs to which file. Adding a prefix, in + combination with the role’s name automatically added by Ansible, makes it a + lot easier to follow and troubleshoot a role play. + +## Problematic code + +```yaml +--- +- hosts: localhost # <-- playbook name[play] + tasks: + - name: create placefolder file # <-- name[casing] due lack of capital letter + ansible.builtin.command: touch /tmp/.placeholder +``` + +## Correct code + +```yaml +--- +- name: Play for creating placeholder + hosts: localhost + tasks: + - name: Create placeholder file + ansible.builtin.command: touch /tmp/.placeholder +``` diff --git a/src/ansiblelint/rules/name.py b/src/ansiblelint/rules/name.py new file mode 100644 index 0000000..671829e --- /dev/null +++ b/src/ansiblelint/rules/name.py @@ -0,0 +1,214 @@ +"""Implementation of NameRule.""" +from __future__ import annotations + +import re +import sys +from copy import deepcopy +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable # noqa: F811 + + +class NameRule(AnsibleLintRule): + """Rule for checking task and play names.""" + + id = "name" + description = ( + "All tasks and plays should have a distinct name for readability " + "and for ``--start-at-task`` to work" + ) + severity = "MEDIUM" + tags = ["idiom"] + version_added = "v6.9.1 (last update)" + _re_templated_inside = re.compile(r".*\{\{.*\}\}.*\w.*$") + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + """Return matches found for a specific play (entry in playbook).""" + results = [] + if file.kind != "playbook": + return [] + if "name" not in data: + return [ + self.create_matcherror( + message="All plays should be named.", + linenumber=data[LINE_NUMBER_KEY], + tag="name[play]", + filename=file, + ) + ] + results.extend( + self._check_name( + data["name"], lintable=file, linenumber=data[LINE_NUMBER_KEY] + ) + ) + return results + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + results = [] + name = task.get("name") + if not name: + results.append( + self.create_matcherror( + message="All tasks should be named.", + linenumber=task[LINE_NUMBER_KEY], + tag="name[missing]", + filename=file, + ) + ) + else: + results.extend( + self._prefix_check( + name, lintable=file, linenumber=task[LINE_NUMBER_KEY] + ) + ) + return results + + def _prefix_check( + self, name: str, lintable: Lintable | None, linenumber: int + ) -> list[MatchError]: + results: list[MatchError] = [] + effective_name = name + if lintable is None: + return [] + + if not results: + results.extend( + self._check_name( + effective_name, lintable=lintable, 
linenumber=linenumber + ) + ) + return results + + def _check_name( + self, name: str, lintable: Lintable | None, linenumber: int + ) -> list[MatchError]: + # This rules applies only to languages that do have uppercase and + # lowercase letter, so we ignore anything else. On Unicode isupper() + # is not necessarily the opposite of islower() + results = [] + # stage one check prefix + effective_name = name + if self._collection and lintable: + prefix = self._collection.options.task_name_prefix.format( + stem=lintable.path.stem + ) + if lintable.kind == "tasks" and lintable.path.stem != "main": + if not name.startswith(prefix): + # For the moment in order to raise errors this rule needs to be + # enabled manually. Still, we do allow use of prefixes even without + # having to enable the rule. + if "name[prefix]" in self._collection.options.enable_list: + results.append( + self.create_matcherror( + message=f"Task name should start with '{prefix}'.", + linenumber=linenumber, + tag="name[prefix]", + filename=lintable, + ) + ) + return results + else: + effective_name = name[len(prefix) :] + + if ( + effective_name[0].isalpha() + and effective_name[0].islower() + and not effective_name[0].isupper() + ): + results.append( + self.create_matcherror( + message="All names should start with an uppercase letter.", + linenumber=linenumber, + tag="name[casing]", + filename=lintable, + ) + ) + if self._re_templated_inside.match(name): + results.append( + self.create_matcherror( + message="Jinja templates should only be at the end of 'name'", + linenumber=linenumber, + tag="name[template]", + filename=lintable, + ) + ) + return results + + +if "pytest" in sys.modules: # noqa: C901 + from ansiblelint.config import options + from ansiblelint.file_utils import Lintable # noqa: F811 + from ansiblelint.rules import RulesCollection + from ansiblelint.runner import Runner + + def test_file_positive() -> None: + """Positive test for unnamed-task.""" + collection = RulesCollection() + collection.register(NameRule()) + success = "examples/playbooks/rule-name-missing-pass.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + def test_file_negative() -> None: + """Negative test for unnamed-task.""" + collection = RulesCollection() + collection.register(NameRule()) + failure = "examples/playbooks/rule-name-missing-fail.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 5 + + def test_name_prefix_negative() -> None: + """Negative test for unnamed-task.""" + custom_options = deepcopy(options) + custom_options.enable_list = ["name[prefix]"] + collection = RulesCollection(options=custom_options) + collection.register(NameRule()) + failure = Lintable( + "examples/playbooks/tasks/rule-name-prefix-fail.yml", kind="tasks" + ) + bad_runner = Runner(failure, rules=collection) + results = bad_runner.run() + assert len(results) == 3 + # , "\n".join(results) + assert results[0].tag == "name[casing]" + assert results[1].tag == "name[prefix]" + assert results[2].tag == "name[prefix]" + + def test_rule_name_lowercase() -> None: + """Negative test for a task that starts with lowercase.""" + collection = RulesCollection() + collection.register(NameRule()) + failure = "examples/playbooks/rule-name-casing.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 1 + assert errs[0].tag == "name[casing]" + assert errs[0].rule.id == "name" + + def test_name_play() -> None: + """Positive test for name[play].""" + 
collection = RulesCollection() + collection.register(NameRule()) + success = "examples/playbooks/rule-name-play-fail.yml" + errs = Runner(success, rules=collection).run() + assert len(errs) == 1 + assert errs[0].tag == "name[play]" + assert errs[0].rule.id == "name" + + def test_name_template() -> None: + """Negative test for name[templated].""" + collection = RulesCollection() + collection.register(NameRule()) + failure = "examples/playbooks/rule-name-templated-fail.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 1 + assert errs[0].tag == "name[template]" diff --git a/src/ansiblelint/rules/no_changed_when.md b/src/ansiblelint/rules/no_changed_when.md new file mode 100644 index 0000000..2e0cb82 --- /dev/null +++ b/src/ansiblelint/rules/no_changed_when.md @@ -0,0 +1,46 @@ +# no-changed-when + +This rule checks that tasks return changes to results or conditions. Unless +tasks only read information, you should ensure that they return changes in the +following ways: + +- Register results or conditions and use the `changed_when` clause. +- Use the `creates` or `removes` argument. + +You should always use the `changed_when` clause on tasks that do not naturally +detect if a change has occurred or not. Some of the most common examples are +[shell] and [command] modules, which run arbitrary commands. + +One very common workaround is to use a boolean value like `changed_when: false` +if the task never changes anything or `changed_when: true` if it always +changes something, but you can also use any expressions, including ones that +use the registered result of a task, like in our example below. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Does not handle any output or return codes + ansible.builtin.command: cat {{ my_file | quote }} # <- Does not handle the command output. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Handle shell output with return code + ansible.builtin.command: cat {{ my_file | quote }} + register: my_output # <- Registers the command output. + changed_when: my_output.rc != 0 # <- Uses the return code to define when the task has changed. +``` + +[shell]: + https://docs.ansible.com/ansible/latest/collections/ansible/builtin/shell_module.html +[command]: + https://docs.ansible.com/ansible/latest/collections/ansible/builtin/command_module.html diff --git a/src/ansiblelint/rules/no_changed_when.py b/src/ansiblelint/rules/no_changed_when.py new file mode 100644 index 0000000..488b0c2 --- /dev/null +++ b/src/ansiblelint/rules/no_changed_when.py @@ -0,0 +1,94 @@ +"""Implementation of the no-changed-when rule.""" +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class CommandHasChangesCheckRule(AnsibleLintRule): + """Commands should not change things if nothing needs doing.""" + + id = "no-changed-when" + severity = "HIGH" + tags = ["command-shell", "idempotency"] + version_added = "historic" + + _commands = [ + "ansible.builtin.command", + "ansible.builtin.shell", + "ansible.builtin.raw", + "ansible.legacy.command", + "ansible.legacy.shell", + "ansible.legacy.raw", + "command", + "shell", + "raw", + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + result = [] + # tasks in a block are "meta" type + if task["__ansible_action_type__"] in ["task", "meta"]: + if task["action"]["__ansible_module__"] in self._commands and ( + "changed_when" not in task + and "creates" not in task["action"] + and "removes" not in task["action"] + ): + result.append(self.create_matcherror(filename=file)) + return result + + +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("file", "expected"), + ( + pytest.param( + "examples/playbooks/rule-no-changed-when-pass.yml", 0, id="pass" + ), + pytest.param( + "examples/playbooks/rule-no-changed-when-fail.yml", 3, id="fail" + ), + ), + ) + def test_rule_no_changed_when( + default_rules_collection: RulesCollection, file: str, expected: int + ) -> None: + """Validate no-changed-when rule.""" + results = Runner(file, rules=default_rules_collection).run() + + for result in results: + assert result.rule.id == CommandHasChangesCheckRule.id, result + assert len(results) == expected diff --git a/src/ansiblelint/rules/no_free_form.md b/src/ansiblelint/rules/no_free_form.md new file mode 100644 index 0000000..3ce5140 --- /dev/null +++ b/src/ansiblelint/rules/no_free_form.md @@ -0,0 +1,58 @@ +# no-free-form + +This rule identifies any use of +[free-form](https://docs.ansible.com/ansible/2.7/user_guide/playbooks_intro.html#action-shorthand) +module calling syntax and asks for switching to the full syntax. + +**Free-form** syntax, also known as **inline** or **shorthand**, can produce +subtle bugs. It can also prevent editors and IDEs from providing feedback, +autocomplete and validation for the edited line. + +!!! note + + As long you just pass a YAML string that contains a `=` character inside as the + parameter to the action module name, we consider this as using free-formsyntax. + Be sure you pass a dictionary to the module, so the free-form parsing is never + triggered. + +As `raw` module only accepts free-form, we trigger `no-free-form[raw]` only if +we detect the presence of `executable=` inside raw calls. 
We advise the explicit
+use of `args:` dictionary for configuring the executable to be run.
+
+This rule can produce messages such as:
+
+- `no-free-form` - Free-form syntax is discouraged.
+- `no-free-form[raw-non-string]` - Passing a non-string value to the `raw` module is
+  neither documented nor supported.
+
+## Problematic code
+
+```yaml
+---
+- name: Example with discouraged free-form syntax
+  hosts: localhost
+  tasks:
+    - name: Create a placeholder file
+      ansible.builtin.command: chdir=/tmp touch foo # <-- don't use free-form
+    - name: Use raw to echo
+      ansible.builtin.raw: executable=/bin/bash echo foo # <-- don't use executable=
+      changed_when: false
+```
+
+## Correct code
+
+```yaml
+---
+- name: Example that avoids free-form syntax
+  hosts: localhost
+  tasks:
+    - name: Create a placeholder file
+      ansible.builtin.command:
+        cmd: touch foo # <-- ansible will not touch it
+        chdir: /tmp
+    - name: Use raw to echo
+      ansible.builtin.raw: echo foo
+      args:
+        executable: /bin/bash # <-- explicit is better
+      changed_when: false
+```
diff --git a/src/ansiblelint/rules/no_free_form.py b/src/ansiblelint/rules/no_free_form.py
new file mode 100644
index 0000000..5a23e8b
--- /dev/null
+++ b/src/ansiblelint/rules/no_free_form.py
@@ -0,0 +1,107 @@
+"""Implementation of NoFreeFormRule."""
+from __future__ import annotations
+
+import re
+import sys
+from typing import TYPE_CHECKING, Any
+
+from ansiblelint.constants import INCLUSION_ACTION_NAMES, LINE_NUMBER_KEY
+from ansiblelint.errors import MatchError
+from ansiblelint.rules import AnsibleLintRule
+
+if TYPE_CHECKING:
+    from ansiblelint.file_utils import Lintable
+
+
+class NoFreeFormRule(AnsibleLintRule):
+    """Rule for detecting discouraged free-form syntax for action modules."""
+
+    id = "no-free-form"
+    description = "Avoid free-form inside files as it can produce subtle bugs."
+    severity = "MEDIUM"
+    tags = ["syntax", "risk"]
+    version_added = "v6.8.0"
+    needs_raw_task = True
+    cmd_shell_re = re.compile(
+        r"(chdir|creates|executable|removes|stdin|stdin_add_newline|warn)="
+    )
+
+    def matchtask(
+        self, task: dict[str, Any], file: Lintable | None = None
+    ) -> list[MatchError]:
+        results: list[MatchError] = []
+        action = task["action"]["__ansible_module_original__"]
+
+        if action in INCLUSION_ACTION_NAMES:
+            return results
+
+        action_value = task["__raw_task__"].get(action, None)
+        if task["action"].get("__ansible_module__", None) == "raw":
+            if isinstance(action_value, str):
+                if "executable=" in action_value:
+                    results.append(
+                        self.create_matcherror(
+                            message="Avoid embedding `executable=` inside raw calls, use explicit args dictionary instead.",
+                            linenumber=task[LINE_NUMBER_KEY],
+                            filename=file,
+                            tag=f"{self.id}[raw]",
+                        )
+                    )
+            else:
+                results.append(
+                    self.create_matcherror(
+                        message="Passing a non-string value to the `raw` module is neither documented nor supported.",
+                        linenumber=task[LINE_NUMBER_KEY],
+                        filename=file,
+                        tag=f"{self.id}[raw-non-string]",
+                    )
+                )
+        elif isinstance(action_value, str) and "=" in action_value:
+            fail = False
+            if task["action"].get("__ansible_module__") in (
+                "ansible.builtin.command",
+                "ansible.builtin.shell",
+                "ansible.windows.win_command",
+                "ansible.windows.win_shell",
+                "command",
+                "shell",
+                "win_command",
+                "win_shell",
+            ):
+                if self.cmd_shell_re.match(action_value):
+                    fail = True
+            else:
+                fail = True
+            if fail:
+                results.append(
+                    self.create_matcherror(
+                        message=f"Avoid using free-form when calling module actions. 
({action})", + linenumber=task[LINE_NUMBER_KEY], + filename=file, + ) + ) + return results + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("file", "expected"), + ( + pytest.param("examples/playbooks/rule-no-free-form-pass.yml", 0, id="pass"), + pytest.param("examples/playbooks/rule-no-free-form-fail.yml", 2, id="fail"), + ), + ) + def test_rule_no_free_form( + default_rules_collection: RulesCollection, file: str, expected: int + ) -> None: + """Validate that rule works as intended.""" + results = Runner(file, rules=default_rules_collection).run() + + for result in results: + assert result.rule.id == NoFreeFormRule.id, result + assert len(results) == expected diff --git a/src/ansiblelint/rules/no_handler.md b/src/ansiblelint/rules/no_handler.md new file mode 100644 index 0000000..4deccaa --- /dev/null +++ b/src/ansiblelint/rules/no_handler.md @@ -0,0 +1,55 @@ +# no-handler + +This rule checks for the correct handling of changes to results or conditions. + +If a task has a `when: result.changed` condition, it effectively acts as a +[handler](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_handlers.html#handlers). +The recommended approach is to use `notify` and move tasks to `handlers`. +If necessary you can silence the rule by add a `# noqa: no-handler` comment at the end of the line. + +## Problematic Code + +```yaml +--- +- name: Example of no-handler rule + hosts: localhost + tasks: + - name: Register result of a task + ansible.builtin.copy: + dest: "/tmp/placeholder" + content: "Ansible made this!" + mode: 0600 + register: result # <-- Registers the result of the task. + - name: Second command to run + ansible.builtin.debug: + msg: The placeholder file was modified! + when: result.changed # <-- Triggers the no-handler rule. +``` + +```yaml +--- +# Optionally silences the rule. +when: result.changed # noqa: no-handler +``` + +## Correct Code + +The following code includes the same functionality as the problematic code without recording a `result` variable. + +```yaml +--- +- name: Example of no-handler rule + hosts: localhost + tasks: + - name: Register result of a task + ansible.builtin.copy: + dest: "/tmp/placeholder" + content: "Ansible made this!" + mode: 0600 + notify: + - Second command to run # <-- Handler runs only when the file changes. + handlers: + - name: Second command to run + ansible.builtin.debug: + msg: The placeholder file was modified! +``` diff --git a/src/ansiblelint/rules/no_handler.py b/src/ansiblelint/rules/no_handler.py new file mode 100644 index 0000000..a7de4e4 --- /dev/null +++ b/src/ansiblelint/rules/no_handler.py @@ -0,0 +1,157 @@ +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +"""UseHandlerRatherThanWhenChangedRule used with ansible-lint.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +def _changed_in_when(item: str) -> bool: + item_list = item.split() + + if not isinstance(item, str) or "and" in item_list: + return False + return any( + changed in item + for changed in [ + ".changed", + "|changed", + '["changed"]', + "['changed']", + "is changed", + ] + ) + + +class UseHandlerRatherThanWhenChangedRule(AnsibleLintRule): + """Tasks that run when changed should likely be handlers.""" + + id = "no-handler" + description = ( + "If a task has a ``when: result.changed`` setting, it is effectively " + "acting as a handler. You could use ``notify`` and move that task to " + "``handlers``." + ) + link = "https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_handlers.html#handlers" + severity = "MEDIUM" + tags = ["idiom"] + version_added = "historic" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["__ansible_action_type__"] != "task": + return False + + when = task.get("when") + + if isinstance(when, list): + for item in when: + return _changed_in_when(item) + if isinstance(when, str): + return _changed_in_when(when) + return False + + +if "pytest" in sys.modules: + import pytest + + SUCCEED_CHANGED_WHEN = """ +- hosts: all + tasks: + - name: Execute something + command: echo 123 + register: result + changed_when: true +""" + + SUCCEED_WHEN_AND = """ +- hosts: all + tasks: + - name: Registering task 1 + command: echo Hello + register: r1 + changed_when: true + + - name: Registering task 2 + command: echo Hello + register: r2 + changed_when: true + + - name: Use when task + command: echo Hello + when: r1.changed and r2.changed +""" + + FAIL_RESULT_IS_CHANGED = """ +- hosts: all + tasks: + - name: This should trigger no-handler rule + command: echo could be done better + when: result is changed +""" + + FAILED_SOMETHING_CHANGED = """ +- hosts: all + tasks: + - name: Do anything + command: echo 123 + when: + - something.changed +""" + + @pytest.mark.parametrize( + "rule_runner", (UseHandlerRatherThanWhenChangedRule,), indirect=["rule_runner"] + ) + def test_succeed_changed_when(rule_runner: Any) -> None: + """Using changed_when is acceptable.""" + results = rule_runner.run_playbook(SUCCEED_CHANGED_WHEN) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (UseHandlerRatherThanWhenChangedRule,), indirect=["rule_runner"] + ) + def test_succeed_when_and(rule_runner: Any) -> None: + """See https://github.com/ansible/ansible-lint/issues/1526.""" + results = rule_runner.run_playbook(SUCCEED_WHEN_AND) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (UseHandlerRatherThanWhenChangedRule,), indirect=["rule_runner"] + ) + def test_fail_result_is_changed(rule_runner: Any) -> None: + """This task uses 'is changed'.""" + results = 
rule_runner.run_playbook(FAIL_RESULT_IS_CHANGED) + assert len(results) == 1 + + @pytest.mark.parametrize( + "rule_runner", (UseHandlerRatherThanWhenChangedRule,), indirect=["rule_runner"] + ) + def test_failed_something_changed(rule_runner: Any) -> None: + """This task uses '.changed'.""" + results = rule_runner.run_playbook(FAILED_SOMETHING_CHANGED) + assert len(results) == 1 diff --git a/src/ansiblelint/rules/no_jinja_when.md b/src/ansiblelint/rules/no_jinja_when.md new file mode 100644 index 0000000..702e807 --- /dev/null +++ b/src/ansiblelint/rules/no_jinja_when.md @@ -0,0 +1,32 @@ +# no-jinja-when + +This rule checks conditional statements for Jinja expressions in curly brackets `{{ }}`. +Ansible processes conditionals statements that use the `when`, `failed_when`, and `changed_when` clauses as Jinja expressions. + +An Ansible rule is to always use `{{ }}` except with `when` keys. +Using `{{ }}` in conditionals creates a nested expression, which is an Ansible +anti-pattern and does not produce expected results. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Shut down Debian systems + ansible.builtin.command: /sbin/shutdown -t now + when: "{{ ansible_facts['os_family'] == 'Debian' }}" # <- Nests a Jinja expression in a conditional statement. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Shut down Debian systems + ansible.builtin.command: /sbin/shutdown -t now + when: ansible_facts['os_family'] == "Debian" # <- Uses facts in a conditional statement. +``` diff --git a/src/ansiblelint/rules/no_jinja_when.py b/src/ansiblelint/rules/no_jinja_when.py new file mode 100644 index 0000000..c5ea41b --- /dev/null +++ b/src/ansiblelint/rules/no_jinja_when.py @@ -0,0 +1,83 @@ +"""Implementation of no-jinja-when rule.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +class NoFormattingInWhenRule(AnsibleLintRule): + """No Jinja2 in when.""" + + id = "no-jinja-when" + description = ( + "``when`` is a raw Jinja2 expression, remove redundant {{ }} from variable(s)." + ) + severity = "HIGH" + tags = ["deprecations"] + version_added = "historic" + + @staticmethod + def _is_valid(when: str) -> bool: + if isinstance(when, list): + for item in when: + if ( + isinstance(item, str) + and item.find("{{") != -1 + and item.find("}}") != -1 + ): + return False + return True + if not isinstance(when, str): + return True + return when.find("{{") == -1 and when.find("}}") == -1 + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + errors: list[MatchError] = [] + if isinstance(data, dict): + if "roles" not in data or data["roles"] is None: + return errors + for role in data["roles"]: + if self.matchtask(role, file=file): + errors.append( + self.create_matcherror( + details=str({"when": role}), + filename=file, + linenumber=role[LINE_NUMBER_KEY], + ) + ) + return errors + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + return "when" in task and not self._is_valid(task["when"]) + + +if "pytest" in sys.modules: + # Tests for no-jinja-when rule. 
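+    # The rule flags string and list forms of `when` alike: any item that contains
+    # both "{{" and "}}" is reported, e.g. when: "{{ foo == 'bar' }}".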
+ from ansiblelint.rules import RulesCollection + from ansiblelint.runner import Runner + + def test_file_positive() -> None: + """Positive test for no-jinja-when.""" + collection = RulesCollection() + collection.register(NoFormattingInWhenRule()) + success = "examples/playbooks/rule-no-jinja-when-pass.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + def test_file_negative() -> None: + """Negative test for no-jinja-when.""" + collection = RulesCollection() + collection.register(NoFormattingInWhenRule()) + failure = "examples/playbooks/rule-no-jinja-when-fail.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 3 diff --git a/src/ansiblelint/rules/no_log_password.md b/src/ansiblelint/rules/no_log_password.md new file mode 100644 index 0000000..579dd11 --- /dev/null +++ b/src/ansiblelint/rules/no_log_password.md @@ -0,0 +1,45 @@ +# no-log-password + +This rule ensures playbooks do not write passwords to logs when using loops. +Always set the `no_log: true` attribute to protect sensitive data. + +While most Ansible modules mask sensitive data, using secrets inside a loop can result in those secrets being logged. +Explicitly adding `no_log: true` prevents accidentally exposing secrets. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Log user passwords + ansible.builtin.user: + name: john_doe + comment: John Doe + uid: 1040 + group: admin + password: "{{ item }}" + with_items: + - wow + no_log: false # <- Sets the no_log attribute to false. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Do not log user passwords + ansible.builtin.user: + name: john_doe + comment: John Doe + uid: 1040 + group: admin + password: "{{ item }}" + with_items: + - wow + no_log: true # <- Sets the no_log attribute to a non-false value. +``` diff --git a/src/ansiblelint/rules/no_log_password.py b/src/ansiblelint/rules/no_log_password.py new file mode 100644 index 0000000..bba2415 --- /dev/null +++ b/src/ansiblelint/rules/no_log_password.py @@ -0,0 +1,265 @@ +# Copyright 2018, Rackspace US, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""NoLogPasswordsRule used with ansible-lint.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.utils import convert_to_boolean + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class NoLogPasswordsRule(AnsibleLintRule): + """Password should not be logged.""" + + id = "no-log-password" + description = ( + "When passing password argument you should have no_log configured " + "to a non False value to avoid accidental leaking of secrets." 
+ ) + severity = "LOW" + tags = ["opt-in", "security", "experimental"] + version_added = "v5.0.9" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["action"]["__ansible_module_original__"] == "ansible.builtin.user" and ( + (task["action"].get("password_lock") or task["action"].get("password_lock")) + and not task["action"].get("password") + ): + has_password = False + else: + for param in task["action"].keys(): + if "password" in param: + has_password = True + break + else: + has_password = False + + has_loop = [key for key in task if key.startswith("with_") or key == "loop"] + # No no_log and no_log: False behave the same way + # and should return a failure (return True), so we + # need to invert the boolean + no_log = task.get("no_log", False) + + if ( + isinstance(no_log, str) + and no_log.startswith("{{") + and no_log.endswith("}}") + ): + # we cannot really evaluate jinja expressions + return False + + return bool( + has_password and not convert_to_boolean(no_log) and len(has_loop) > 0 + ) + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + NO_LOG_UNUSED = """ +- name: Test + hosts: all + tasks: + - name: Succeed when no_log is not used but no loop present + ansible.builtin.user: + name: john_doe + password: "wow" + state: absent +""" + + NO_LOG_FALSE = """ +- hosts: all + tasks: + - name: Use of jinja for no_log is valid + user: + name: john_doe + user_password: "{{ item }}" + state: absent + no_log: "{{ False }}" + - name: Fail when no_log is set to False + user: + name: john_doe + user_password: "{{ item }}" + state: absent + with_items: + - wow + - now + no_log: False + - name: Fail when no_log is set to False + ansible.builtin.user: + name: john_doe + user_password: "{{ item }}" + state: absent + with_items: + - wow + - now + no_log: False +""" + + NO_LOG_NO = """ +- hosts: all + tasks: + - name: Fail when no_log is set to no + user: + name: john_doe + password: "{{ item }}" + state: absent + no_log: no + loop: + - wow + - now +""" + + PASSWORD_WITH_LOCK = """ +- hosts: all + tasks: + - name: Fail when password is set and password_lock is true + user: + name: "{{ item }}" + password: "wow" + password_lock: true + with_random_choice: + - ansible + - lint +""" + + NO_LOG_YES = """ +- hosts: all + tasks: + - name: Succeed when no_log is set to yes + with_list: + - name: user + password: wow + - password: now + name: ansible + user: + name: "{{ item.name }}" + password: "{{ item.password }}" + state: absent + no_log: yes +""" + + NO_LOG_TRUE = """ +- hosts: all + tasks: + - name: Succeed when no_log is set to True + user: + name: john_doe + user_password: "{{ item }}" + state: absent + no_log: True + loop: + - wow + - now +""" + + PASSWORD_LOCK_YES = """ +- hosts: all + tasks: + - name: Succeed when only password locking account + user: + name: "{{ item }}" + password_lock: yes + # user_password: "this is a comment, not a password" + with_list: + - ansible + - lint +""" + + PASSWORD_LOCK_FALSE = """ +- hosts: all + tasks: + - name: Succeed when password_lock is false and password is not used + user: + name: lint + password_lock: False +""" + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_no_log_unused(rule_runner: RunFromText) -> None: + """The task does not use no_log but also no loop.""" + results = rule_runner.run_playbook(NO_LOG_UNUSED) + assert len(results) == 0 + + 
@pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_no_log_false(rule_runner: RunFromText) -> None: + """The task sets no_log to false.""" + results = rule_runner.run_playbook(NO_LOG_FALSE) + assert len(results) == 2 + for result in results: + assert result.rule.id == "no-log-password" + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_no_log_no(rule_runner: RunFromText) -> None: + """The task sets no_log to no.""" + results = rule_runner.run_playbook(NO_LOG_NO) + assert len(results) == 1 + assert results[0].rule.id == "no-log-password" + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_password_with_lock(rule_runner: RunFromText) -> None: + """The task sets a password but also lock the user.""" + results = rule_runner.run_playbook(PASSWORD_WITH_LOCK) + assert len(results) == 1 + assert results[0].rule.id == "no-log-password" + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_no_log_yes(rule_runner: RunFromText) -> None: + """The task sets no_log to yes.""" + results = rule_runner.run_playbook(NO_LOG_YES) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_no_log_true(rule_runner: RunFromText) -> None: + """The task sets no_log to true.""" + results = rule_runner.run_playbook(NO_LOG_TRUE) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_no_log_password_lock_yes(rule_runner: RunFromText) -> None: + """The task only locks the user.""" + results = rule_runner.run_playbook(PASSWORD_LOCK_YES) + assert len(results) == 0 + + @pytest.mark.parametrize( + "rule_runner", (NoLogPasswordsRule,), indirect=["rule_runner"] + ) + def test_password_lock_false(rule_runner: RunFromText) -> None: + """The task does not actually lock the user.""" + results = rule_runner.run_playbook(PASSWORD_LOCK_FALSE) + assert len(results) == 0 diff --git a/src/ansiblelint/rules/no_prompting.md b/src/ansiblelint/rules/no_prompting.md new file mode 100644 index 0000000..7e525c8 --- /dev/null +++ b/src/ansiblelint/rules/no_prompting.md @@ -0,0 +1,35 @@ +# no-prompting + +This rule checks for `vars_prompt` or the `ansible.builtin.pause` module in playbooks. +You should enable this rule to ensure that playbooks can run unattended and in CI/CD pipelines. + +This is an opt-in rule. +You must enable it in your Ansible-lint configuration as follows: + +```yaml +enable_list: + - no-prompting +``` + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + vars_prompt: # <- Prompts the user to input credentials. + - name: username + prompt: What is your username? + private: false + + - name: password + prompt: What is your password? + tasks: + - name: Pause for 5 minutes + ansible.builtin.pause: + minutes: 5 # <- Pauses playbook execution for a set period of time. +``` + +## Correct Code + +Correct code for this rule is to omit `vars_prompt` and the `ansible.builtin.pause` module from your playbook. 
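+
+As a minimal sketch (the variable names and the `env` lookup are illustrative, not
+part of the rule), the same data can be supplied without prompting, for example from
+the environment or with `--extra-vars` at invocation time:
+
+```yaml
+---
+- name: Example playbook
+  hosts: all
+  vars:
+    username: "{{ lookup('env', 'DEPLOY_USERNAME') }}" # <- Read without prompting.
+    password: "{{ lookup('env', 'DEPLOY_PASSWORD') }}" # <- Read without prompting.
+  tasks:
+    - name: Show which user is deploying
+      ansible.builtin.debug:
+        msg: "Deploying as {{ username }}"
+```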
diff --git a/src/ansiblelint/rules/no_prompting.py b/src/ansiblelint/rules/no_prompting.py new file mode 100644 index 0000000..27893d1 --- /dev/null +++ b/src/ansiblelint/rules/no_prompting.py @@ -0,0 +1,73 @@ +"""Implementation of no-prompting rule.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +class NoPromptingRule(AnsibleLintRule): + """Disallow prompting.""" + + id = "no-prompting" + description = ( + "Disallow the use of vars_prompt or ansible.builtin.pause to better" + "accommodate unattended playbook runs and use in CI pipelines." + ) + tags = ["opt-in"] + severity = "VERY_LOW" + version_added = "v6.0.3" + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + """Return matches found for a specific playbook.""" + # If the Play uses the 'vars_prompt' section to set variables + + if file.kind != "playbook": # pragma: no cover + return [] + + vars_prompt = data.get("vars_prompt", None) + if not vars_prompt: + return [] + return [ + self.create_matcherror( + message="Play uses vars_prompt", + linenumber=vars_prompt[0][LINE_NUMBER_KEY], + filename=file, + ) + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + """Return matches for ansible.builtin.pause tasks.""" + # We do not want to trigger this rule if pause has either seconds or + # minutes defined, as that does not make it blocking. + return task["action"]["__ansible_module_original__"] in [ + "pause", + "ansible.builtin.pause", + ] and not ( + task["action"].get("minutes", None) or task["action"].get("seconds", None) + ) + + +if "pytest" in sys.modules: + from ansiblelint.config import options + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + def test_no_prompting_fail() -> None: + """Negative test for no-prompting.""" + # For testing we want to manually enable opt-in rules + options.enable_list = ["no-prompting"] + rules = RulesCollection(options=options) + rules.register(NoPromptingRule()) + results = Runner("examples/playbooks/rule-no-prompting.yml", rules=rules).run() + assert len(results) == 2 + for result in results: + assert result.rule.id == "no-prompting" diff --git a/src/ansiblelint/rules/no_relative_paths.md b/src/ansiblelint/rules/no_relative_paths.md new file mode 100644 index 0000000..568a145 --- /dev/null +++ b/src/ansiblelint/rules/no_relative_paths.md @@ -0,0 +1,94 @@ +# no-relative-paths + +This rule checks for relative paths in the `ansible.builtin.copy` and +`ansible.builtin.template` modules. + +Relative paths in a task most often direct Ansible to remote files and +directories on managed nodes. In the `ansible.builtin.copy` and +`ansible.builtin.template` modules, the `src` argument refers to local files and +directories on the control node. + +The recommended locations to store files are as follows: + +- Use the `files/` folder in the playbook or role directory for the `copy` + module. +- Use the `templates/` folder in the playbook or role directory for the + `template` module. + +These folders allow you to omit the path or use a sub-folder when specifying +files with the `src` argument. + +!!! 
note + + If resources are outside your Ansible playbook or role directory you should use an absolute path with the `src` argument. + +!!! warning + + Do not store resources at the same directory level as your Ansible playbook or tasks files. + Doing this can result in disorganized projects and cause user confusion when distinguishing between resources of the same type, such as YAML. + +See +[task paths](https://docs.ansible.com/ansible/latest/playbook_guide/playbook_pathing.html#task-paths) +in the Ansible documentation for more information. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Template a file to /etc/file.conf + ansible.builtin.template: + src: ../my_templates/foo.j2 # <- Uses a relative path in the src argument. + dest: /etc/file.conf + owner: bin + group: wheel + mode: "0644" +``` + +```yaml +- name: Example playbook + hosts: all + vars: + source_path: ../../my_templates/foo.j2 # <- Sets a variable to a relative path. + tasks: + - name: Copy a file to /etc/file.conf + ansible.builtin.copy: + src: "{{ source_path }}" # <- Uses the variable in the src argument. + dest: /etc/foo.conf + owner: foo + group: foo + mode: "0644" +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Template a file to /etc/file.conf + ansible.builtin.template: + src: foo.j2 # <- Uses a path from inside templates/ directory. + dest: /etc/file.conf + owner: bin + group: wheel + mode: "0644" +``` + +```yaml +- name: Example playbook + hosts: all + vars: + source_path: foo.j2 # <- Uses a path from inside files/ directory. + tasks: + - name: Copy a file to /etc/file.conf + ansible.builtin.copy: + src: "{{ source_path }}" # <- Uses the variable in the src argument. + dest: /etc/foo.conf + owner: foo + group: foo + mode: "0644" +``` diff --git a/src/ansiblelint/rules/no_relative_paths.py b/src/ansiblelint/rules/no_relative_paths.py new file mode 100644 index 0000000..16ea51b --- /dev/null +++ b/src/ansiblelint/rules/no_relative_paths.py @@ -0,0 +1,45 @@ +"""Implementation of no-relative-paths rule.""" +# Copyright (c) 2016, Tsukinowa Inc. <info@tsukinowa.jp> +# Copyright (c) 2018, Ansible Project + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class RoleRelativePath(AnsibleLintRule): + """The src argument should not use a relative path.""" + + id = "no-relative-paths" + description = "The ``copy`` and ``template`` modules should not use relative path for ``src``." + severity = "HIGH" + tags = ["idiom"] + version_added = "v4.0.0" + + _module_to_path_folder = { + "copy": "files", + "win_copy": "files", + "template": "templates", + "win_template": "win_templates", + } + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + module = task["action"]["__ansible_module__"] + if module not in self._module_to_path_folder: + return False + + if "src" not in task["action"]: + return False + + path_to_check = f"../{self._module_to_path_folder[module]}" + if path_to_check in task["action"]["src"]: + return True + + return False diff --git a/src/ansiblelint/rules/no_same_owner.md b/src/ansiblelint/rules/no_same_owner.md new file mode 100644 index 0000000..350a3d4 --- /dev/null +++ b/src/ansiblelint/rules/no_same_owner.md @@ -0,0 +1,55 @@ +# no-same-owner + +This rule checks that the owner and group do not transfer across hosts. 
+ +In many cases the owner and group on remote hosts do not match the owner and group assigned to source files. +Preserving the owner and group during transfer can result in errors with permissions or leaking sensitive information. + +When you synchronize files, you should avoid transferring the owner and group by setting `owner: false` and `group: false` arguments. +When you unpack archives with the `ansible.builtin.unarchive` module you should set the `--no-same-owner` option. + +This is an opt-in rule. +You must enable it in your Ansible-lint configuration as follows: + +```yaml +enable_list: + - no-same-owner +``` + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Synchronize conf file + ansible.posix.synchronize: + src: /path/conf.yaml + dest: /path/conf.yaml # <- Transfers the owner and group for the file. + - name: Extract tarball to path + ansible.builtin.unarchive: + src: "{{ file }}.tar.gz" + dest: /my/path/ # <- Transfers the owner and group for the file. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Synchronize conf file + ansible.posix.synchronize: + src: /path/conf.yaml + dest: /path/conf.yaml + owner: false + group: false # <- Does not transfer the owner and group for the file. + - name: Extract tarball to path + ansible.builtin.unarchive: + src: "{{ file }}.tar.gz" + dest: /my/path/ + extra_opts: + - --no-same-owner # <- Does not transfer the owner and group for the file. +``` diff --git a/src/ansiblelint/rules/no_same_owner.py b/src/ansiblelint/rules/no_same_owner.py new file mode 100644 index 0000000..77c1b40 --- /dev/null +++ b/src/ansiblelint/rules/no_same_owner.py @@ -0,0 +1,104 @@ +"""Optional rule for avoiding keeping owner/group when transferring files.""" +from __future__ import annotations + +import re +import sys +from typing import TYPE_CHECKING, Any + +from ansible.utils.sentinel import Sentinel + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class NoSameOwnerRule(AnsibleLintRule): + """Do not preserve the owner and group when transferring files across hosts.""" + + id = "no-same-owner" + description = """ +Optional rule that highlights dangers of assuming that user/group on the remote +machines may not exist on ansible controller or vice versa. Owner and group +should not be preserved when transferring files between them. 
+""" + severity = "LOW" + tags = ["opt-in"] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + """Return matches for a task.""" + action = task.get("action") + if not isinstance(action, dict): + return False + + module = action["__ansible_module__"] + + if module in ["synchronize", "ansible.posix.synchronize"]: + return self.handle_synchronize(task, action) + + if module in ["unarchive", "ansible.builtin.unarchive"]: + return self.handle_unarchive(task, action) + + return False + + @staticmethod + def handle_synchronize(task: Any, action: dict[str, Any]) -> bool: + """Process a synchronize task.""" + if task.get("delegate_to") != Sentinel: + return False + + archive = action.get("archive", True) + if action.get("owner", archive) or action.get("group", archive): + return True + return False + + @staticmethod + def handle_unarchive(task: Any, action: dict[str, Any]) -> bool: + """Process unarchive task.""" + delegate_to = task.get("delegate_to") + if ( + delegate_to == "localhost" + or delegate_to != "localhost" + and not action.get("remote_src") + ): + src = action.get("src") + if not isinstance(src, str): + return False + + if src.endswith("zip"): + if "-X" in action.get("extra_opts", []): + return True + if re.search(r".*\.tar(\.(gz|bz2|xz))?$", src): + if "--no-same-owner" not in action.get("extra_opts", []): + return True + return False + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("test_file", "failures"), + ( + pytest.param( + "examples/roles/role_for_no_same_owner/tasks/fail.yml", 12, id="fail" + ), + pytest.param( + "examples/roles/role_for_no_same_owner/tasks/pass.yml", 0, id="pass" + ), + ), + ) + def test_no_same_owner_rule( + default_rules_collection: RulesCollection, test_file: str, failures: int + ) -> None: + """Test rule matches.""" + results = Runner(test_file, rules=default_rules_collection).run() + assert len(results) == failures + for result in results: + assert result.message == NoSameOwnerRule().shortdesc diff --git a/src/ansiblelint/rules/no_tabs.md b/src/ansiblelint/rules/no_tabs.md new file mode 100644 index 0000000..7895122 --- /dev/null +++ b/src/ansiblelint/rules/no_tabs.md @@ -0,0 +1,38 @@ +# no-tabs + +This rule checks for the tab character. The `\t` tab character can result in +unexpected display or formatting issues. You should always use spaces instead of +tabs. + +!!! note + + This rule does not trigger alerts for tab characters in the ``ansible.builtin.lineinfile`` module. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Do not trigger the rule + ansible.builtin.lineinfile: + path: some.txt + regexp: '^\t$' + line: 'string with \t inside' + - name: Trigger the rule with a debug message + ansible.builtin.debug: + msg: "Using the \t character can cause formatting issues." # <- Includes the tab character. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Do not trigger the no-tabs rule + ansible.builtin.debug: + msg: "Using space characters avoids formatting issues." 
+``` diff --git a/src/ansiblelint/rules/no_tabs.py b/src/ansiblelint/rules/no_tabs.py new file mode 100644 index 0000000..c57e352 --- /dev/null +++ b/src/ansiblelint/rules/no_tabs.py @@ -0,0 +1,63 @@ +"""Implementation of no-tabs rule.""" +# Copyright (c) 2016, Will Thames and contributors +# Copyright (c) 2018, Ansible Project +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.yaml_utils import nested_items_path + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class NoTabsRule(AnsibleLintRule): + """Most files should not contain tabs.""" + + id = "no-tabs" + description = "Tabs can cause unexpected display issues, use spaces" + severity = "LOW" + tags = ["formatting"] + version_added = "v4.0.0" + allow_list = [ + ("lineinfile", "insertafter"), + ("lineinfile", "insertbefore"), + ("lineinfile", "regexp"), + ("lineinfile", "line"), + ("ansible.builtin.lineinfile", "insertafter"), + ("ansible.builtin.lineinfile", "insertbefore"), + ("ansible.builtin.lineinfile", "regexp"), + ("ansible.builtin.lineinfile", "line"), + ("ansible.legacy.lineinfile", "insertafter"), + ("ansible.legacy.lineinfile", "insertbefore"), + ("ansible.legacy.lineinfile", "regexp"), + ("ansible.legacy.lineinfile", "line"), + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + action = task["action"]["__ansible_module__"] + for k, v, _ in nested_items_path(task): + if isinstance(k, str) and "\t" in k: + return True + if isinstance(v, str) and "\t" in v and (action, k) not in self.allow_list: + return True + return False + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + def test_no_tabs_rule(default_rules_collection: RulesCollection) -> None: + """Test rule matches.""" + results = Runner( + "examples/playbooks/rule-no-tabs.yml", rules=default_rules_collection + ).run() + assert results[0].linenumber == 10 + assert results[0].message == NoTabsRule().shortdesc + assert len(results) == 1 diff --git a/src/ansiblelint/rules/only_builtins.md b/src/ansiblelint/rules/only_builtins.md new file mode 100644 index 0000000..750e194 --- /dev/null +++ b/src/ansiblelint/rules/only_builtins.md @@ -0,0 +1,36 @@ +# only-builtins + +This rule checks that playbooks use actions from the `ansible.builtin` collection only. + +This is an opt-in rule. +You must enable it in your Ansible-lint configuration as follows: + +```yaml +enable_list: + - only-builtins +``` + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: all + tasks: + - name: Deploy a Helm chart for Prometheus + kubernetes.core.helm: # <- Uses a non-builtin collection. + name: test + chart_ref: stable/prometheus + release_namespace: monitoring + create_namespace: true +``` + +## Correct Code + +```yaml +- name: Example playbook + hosts: localhost + tasks: + - name: Run a shell command + ansible.builtin.shell: echo This playbook uses actions from the builtin collection only. 
+``` diff --git a/src/ansiblelint/rules/only_builtins.py b/src/ansiblelint/rules/only_builtins.py new file mode 100644 index 0000000..d84b9d7 --- /dev/null +++ b/src/ansiblelint/rules/only_builtins.py @@ -0,0 +1,94 @@ +"""Rule definition for usage of builtin actions only.""" +from __future__ import annotations + +import sys +from typing import Any + +from ansiblelint.config import options +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.rules.fqcn import builtins +from ansiblelint.skip_utils import is_nested_task + + +class OnlyBuiltinsRule(AnsibleLintRule): + """Use only builtin actions.""" + + id = "only-builtins" + severity = "MEDIUM" + description = "Check whether the playbook uses anything but ``ansible.builtin``" + tags = ["opt-in", "experimental"] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + module = task["action"]["__ansible_module_original__"] + + allowed_collections = [ + "ansible.builtin", + "ansible.legacy", + ] + options.only_builtins_allow_collections + allowed_modules = builtins + options.only_builtins_allow_modules + + is_allowed = ( + any(module.startswith(f"{prefix}.") for prefix in allowed_collections) + or module in allowed_modules + ) + + return not is_allowed and not is_nested_task(task) + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + # pylint: disable=ungrouped-imports + import pytest + + from ansiblelint.constants import SUCCESS_RC, VIOLATIONS_FOUND_RC + from ansiblelint.testing import RunFromText, run_ansible_lint + + SUCCESS_PLAY = """ +- hosts: localhost + tasks: + - name: A block + block: + - name: Shell (fqcn) + ansible.builtin.shell: echo This rule should not get matched by the only-builtins rule + - name: Command with legacy FQCN + ansible.legacy.command: echo This rule should not get matched by the only-builtins rule + """ + + def test_only_builtins_fail() -> None: + """Test rule matches.""" + result = run_ansible_lint( + "--strict", + "--warn-list=", + "--enable-list", + "only-builtins", + "examples/playbooks/rule-only-builtins.yml", + ) + assert result.returncode == VIOLATIONS_FOUND_RC + assert "Failed" in result.stderr + assert "warning(s)" in result.stderr + assert "only-builtins: Use only builtin actions" in result.stdout + + def test_only_builtins_allow() -> None: + """Test rule doesn't match.""" + conf_path = "examples/playbooks/.ansible-lint-only-builtins-allow" + result = run_ansible_lint( + f"--config-file={conf_path}", + "--strict", + "--warn-list=", + "--enable-list", + "only-builtins", + "examples/playbooks/rule-only-builtins.yml", + ) + assert "only-builtins" not in result.stdout + assert result.returncode == SUCCESS_RC + + @pytest.mark.parametrize( + "rule_runner", (OnlyBuiltinsRule,), indirect=["rule_runner"] + ) + def test_only_builtin_pass(rule_runner: RunFromText) -> None: + """Test rule does not match.""" + results = rule_runner.run_playbook(SUCCESS_PLAY) + assert len(results) == 0, results diff --git a/src/ansiblelint/rules/package_latest.md b/src/ansiblelint/rules/package_latest.md new file mode 100644 index 0000000..c7e0d82 --- /dev/null +++ b/src/ansiblelint/rules/package_latest.md @@ -0,0 +1,71 @@ +# package-latest + +This rule checks that package managers install software in a controlled, safe manner. + +Package manager modules, such as `ansible.builtin.yum`, include a `state` parameter that configures how Ansible installs software. 
+In production environments, you should set `state` to `present` and specify a target version to ensure that packages are installed to a planned and tested version. + +Setting `state` to `latest` not only installs software, it performs an update and installs additional packages. +This can result in performance degradation or loss of service. +If you do want to update packages to the latest version, you should also set the `update_only` parameter to `true` to avoid installing additional packages. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Install Ansible + ansible.builtin.yum: + name: ansible + state: latest # <- Installs the latest package. + + - name: Install Ansible-lint + ansible.builtin.pip: + name: ansible-lint + args: + state: latest # <- Installs the latest package. + + - name: Install some-package + ansible.builtin.package: + name: some-package + state: latest # <- Installs the latest package. + + - name: Install Ansible with update_only to false + ansible.builtin.yum: + name: sudo + state: latest + update_only: false # <- Updates and installs packages. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Install Ansible + ansible.builtin.yum: + name: ansible-2.12.7.0 + state: present # <- Pins the version to install with yum. + + - name: Install Ansible-lint + ansible.builtin.pip: + name: ansible-lint + args: + state: present + version: 5.4.0 # <- Pins the version to install with pip. + + - name: Install some-package + ansible.builtin.package: + name: some-package + state: present # <- Ensures the package is installed. + + - name: Update Ansible with update_only to true + ansible.builtin.yum: + name: sudo + state: latest + update_only: true # <- Updates but does not install additional packages. +``` diff --git a/src/ansiblelint/rules/package_latest.py b/src/ansiblelint/rules/package_latest.py new file mode 100644 index 0000000..dd3074a --- /dev/null +++ b/src/ansiblelint/rules/package_latest.py @@ -0,0 +1,82 @@ +"""Implementations of the package-latest rule.""" +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class PackageIsNotLatestRule(AnsibleLintRule): + """Package installs should not use latest.""" + + id = "package-latest" + description = ( + "Package installs should use ``state=present`` with or without a version" + ) + severity = "VERY_LOW" + tags = ["idempotency"] + version_added = "historic" + + _package_managers = [ + # spell-checker: disable + "apk", + "apt", + "bower", + "bundler", + "dnf", + "easy_install", + "gem", + "homebrew", + "jenkins_plugin", + "npm", + "openbsd_package", + "openbsd_pkg", + "package", + "pacman", + "pear", + "pip", + "pkg5", + "pkgutil", + "portage", + "slackpkg", + "sorcery", + "swdepot", + "win_chocolatey", + "yarn", + "yum", + "zypper", + # spell-checker: enable + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + return ( + task["action"]["__ansible_module__"] in self._package_managers + and not task["action"].get("version") + and not task["action"].get("update_only") + and task["action"].get("state") == "latest" + ) diff --git a/src/ansiblelint/rules/partial_become.md b/src/ansiblelint/rules/partial_become.md new file mode 100644 index 0000000..01f9dae --- /dev/null +++ b/src/ansiblelint/rules/partial_become.md @@ -0,0 +1,42 @@ +# partial-become + +This rule checks that privilege escalation is activated when changing users. + +To perform an action as a different user with the `become_user` directive, you +must set `become: true`. + +!!! warning + + While Ansible inherits have of `become` and `become_user` from upper levels, + like play level or command line, we do not look at these values. This rule + requires you to be explicit and always define both in the same place, mainly + in order to prevent accidents when some tasks are moved from one location to + another one. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Start the httpd service as the apache user + ansible.builtin.service: + name: httpd + state: started + become_user: apache # <- Does not change the user because "become: true" is not set. +``` + +## Correct Code + +```yaml +- name: Example playbook + hosts: localhost + tasks: + - name: Start the httpd service as the apache user + ansible.builtin.service: + name: httpd + state: started + become: true # <- Activates privilege escalation. + become_user: apache # <- Changes the user with the desired privileges. +``` diff --git a/src/ansiblelint/rules/partial_become.py b/src/ansiblelint/rules/partial_become.py new file mode 100644 index 0000000..f3a3f72 --- /dev/null +++ b/src/ansiblelint/rules/partial_become.py @@ -0,0 +1,133 @@ +"""Implementation of partial-become rule.""" +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import annotations + +import sys +from functools import reduce +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + from ansiblelint.file_utils import Lintable + + +def _get_subtasks(data: dict[str, Any]) -> list[Any]: + result: list[Any] = [] + block_names = [ + "tasks", + "pre_tasks", + "post_tasks", + "handlers", + "block", + "always", + "rescue", + ] + for name in block_names: + if data and name in data: + result += data[name] or [] + return result + + +def _nested_search(term: str, data: dict[str, Any]) -> Any: + if data and term in data: + return True + return reduce( + (lambda x, y: x or _nested_search(term, y)), _get_subtasks(data), False + ) + + +def _become_user_without_become(becomeuserabove: bool, data: dict[str, Any]) -> Any: + if "become" in data: + # If become is in lineage of tree then correct + return False + if "become_user" in data and _nested_search("become", data): + # If 'become_user' on tree and become somewhere below + # we must check for a case of a second 'become_user' without a + # 'become' in its lineage + subtasks = _get_subtasks(data) + return reduce( + (lambda x, y: x or _become_user_without_become(False, y)), subtasks, False + ) + if _nested_search("become_user", data): + # Keep searching down if 'become_user' exists in the tree below current task + subtasks = _get_subtasks(data) + return len(subtasks) == 0 or reduce( + ( + lambda x, y: x + or _become_user_without_become( + becomeuserabove or "become_user" in data, y + ) + ), + subtasks, + False, + ) + # If at bottom of tree, flag up if 'become_user' existed in the lineage of the tree and + # 'become' was not. 
This is an error if any lineage has a 'become_user' but no become + return becomeuserabove + + +class BecomeUserWithoutBecomeRule(AnsibleLintRule): + """become_user requires become to work as expected.""" + + id = "partial-become" + description = "``become_user`` without ``become`` will not actually change user" + severity = "VERY_HIGH" + tags = ["unpredictability"] + version_added = "historic" + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + if file.kind == "playbook": + result = _become_user_without_become(False, data) + if result: + return [ + self.create_matcherror( + message=self.shortdesc, + filename=file, + linenumber=data[LINE_NUMBER_KEY], + ) + ] + return [] + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + def test_partial_become_positive() -> None: + """Positive test for partial-become.""" + collection = RulesCollection() + collection.register(BecomeUserWithoutBecomeRule()) + success = "examples/playbooks/rule-partial-become-without-become-pass.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + def test_partial_become_negative() -> None: + """Negative test for partial-become.""" + collection = RulesCollection() + collection.register(BecomeUserWithoutBecomeRule()) + failure = "examples/playbooks/rule-partial-become-without-become-fail.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 3 diff --git a/src/ansiblelint/rules/playbook_extension.md b/src/ansiblelint/rules/playbook_extension.md new file mode 100644 index 0000000..dd0e475 --- /dev/null +++ b/src/ansiblelint/rules/playbook_extension.md @@ -0,0 +1,14 @@ +# playbook-extension + +This rule checks the file extension for playbooks is either `.yml` or `.yaml`. +Ansible playbooks are expressed in YAML format with minimal syntax. + +The [YAML syntax](https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html#yaml-syntax) reference provides additional detail. + +## Problematic Code + +This rule is triggered if Ansible playbooks do not have a file extension or use an unsupported file extension such as `playbook.json` or `playbook.xml`. + +## Correct Code + +Save Ansible playbooks as valid YAML with the `.yml` or `.yaml` file extension. diff --git a/src/ansiblelint/rules/playbook_extension.py b/src/ansiblelint/rules/playbook_extension.py new file mode 100644 index 0000000..491b1fc --- /dev/null +++ b/src/ansiblelint/rules/playbook_extension.py @@ -0,0 +1,53 @@ +"""Implementation of playbook-extension rule.""" +# Copyright (c) 2016, Tsukinowa Inc. 
<info@tsukinowa.jp> +# Copyright (c) 2018, Ansible Project +from __future__ import annotations + +import os +import sys + +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.runner import Runner + + +class PlaybookExtensionRule(AnsibleLintRule): + """Use ".yml" or ".yaml" playbook extension.""" + + id = "playbook-extension" + description = 'Playbooks should have the ".yml" or ".yaml" extension' + severity = "MEDIUM" + tags = ["formatting"] + done: list[str] = [] + version_added = "v4.0.0" + + def matchyaml(self, file: Lintable) -> list[MatchError]: + result: list[MatchError] = [] + if file.kind != "playbook": + return result + path = str(file.path) + ext = os.path.splitext(path) + if ext[1] not in [".yml", ".yaml"] and path not in self.done: + self.done.append(path) + result.append(self.create_matcherror(filename=file)) + return result + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("file", "expected"), + (pytest.param("examples/playbooks/play-without-extension", 1, id="fail"),), + ) + def test_playbook_extension(file: str, expected: int) -> None: + """The ini_file module does not accept preserve mode.""" + rules = RulesCollection() + rules.register(PlaybookExtensionRule()) + results = Runner(Lintable(file, kind="playbook"), rules=rules).run() + assert len(results) == expected + for result in results: + assert result.tag == "playbook-extension" diff --git a/src/ansiblelint/rules/risky_file_permissions.md b/src/ansiblelint/rules/risky_file_permissions.md new file mode 100644 index 0000000..a04e12d --- /dev/null +++ b/src/ansiblelint/rules/risky_file_permissions.md @@ -0,0 +1,57 @@ +# risky-file-permissions + +This rule is triggered by various modules that could end up creating new files +on disk with permissions that might be too open, or unpredictable. Please read +the documentation of each module carefully to understand the implications of +using different argument values, as these make the difference between using the +module safely or not. The fix depends on each module and also your particular +situation. + +Some modules have a `create` argument that defaults to `true`. For those you +either need to set `create: false` or provide some permissions like `mode: 0600` +to make the behavior predictable and not dependent on the current system +settings. + +Modules that are checked: + +- [`ansible.builtin.assemble`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/assemble_module.html) +- [`ansible.builtin.copy`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/copy_module.html) +- [`ansible.builtin.file`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/file_module.html) +- [`ansible.builtin.get_url`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/get_url_module.html) +- [`ansible.builtin.replace`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/replace_module.html) +- [`ansible.builtin.template`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/template_module.html) +- [`community.general.archive`](https://docs.ansible.com/ansible/latest/collections/community/general/archive_module.html) +- [`community.general.ini_file`](https://docs.ansible.com/ansible/latest/collections/community/general/ini_file_module.html) + +!!! 
warning + + This rule does not take [module_defaults](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_module_defaults.html) configuration into account. + There are currently no plans to implement this feature because changing task location can also change task behavior. + +## Problematic code + +```yaml +--- +- name: Unsafe example of using ini_file + community.general.ini_file: + path: foo + create: true + mode: preserve +``` + +## Correct code + +```yaml +--- +- name: Safe example of using ini_file (1st solution) + community.general.ini_file: + path: foo + create: false # prevents creating a file with potentially insecure permissions + mode: preserve + +- name: Safe example of using ini_file (2nd solution) + community.general.ini_file: + path: foo + mode: 0600 # explicitly sets the desired permissions, to make the results predictable + mode: preserve +``` diff --git a/src/ansiblelint/rules/risky_file_permissions.py b/src/ansiblelint/rules/risky_file_permissions.py new file mode 100644 index 0000000..b689315 --- /dev/null +++ b/src/ansiblelint/rules/risky_file_permissions.py @@ -0,0 +1,156 @@ +# Copyright (c) 2020 Sorin Sbarnea <sorin.sbarnea@gmail.com> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+"""MissingFilePermissionsRule used with ansible-lint.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +# Despite documentation mentioning 'preserve' only these modules support it: +_modules_with_preserve = ( + "copy", + "template", +) + +_MODULES: set[str] = { + "archive", + "community.general.archive", + "assemble", + "ansible.builtin.assemble", + "copy", # supports preserve + "ansible.builtin.copy", + "file", + "ansible.builtin.file", + "get_url", + "ansible.builtin.get_url", + "replace", # implicit preserve behavior but mode: preserve is invalid + "ansible.builtin.replace", + "template", # supports preserve + "ansible.builtin.template", + # 'unarchive', # disabled because .tar.gz files can have permissions inside +} + +_MODULES_WITH_CREATE: dict[str, bool] = { + "blockinfile": False, + "ansible.builtin.blockinfile": False, + "htpasswd": True, + "community.general.htpasswd": True, + "ini_file": True, + "community.general.ini_file": True, + "lineinfile": False, + "ansible.builtin.lineinfile": False, +} + + +class MissingFilePermissionsRule(AnsibleLintRule): + """File permissions unset or incorrect.""" + + id = "risky-file-permissions" + description = ( + "Missing or unsupported mode parameter can cause unexpected file " + "permissions based " + "on version of Ansible being used. Be explicit, like `mode: 0644` to " + "avoid hitting this rule. Special `preserve` value is accepted " + f"only by {', '.join([f'`{x}`' for x in _modules_with_preserve])} modules." + ) + link = "https://github.com/ansible/ansible/issues/71200" + severity = "VERY_HIGH" + tags = ["unpredictability"] + version_added = "v4.3.0" + + _modules = _MODULES + _modules_with_create = _MODULES_WITH_CREATE + + # pylint: disable=too-many-return-statements + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + module = task["action"]["__ansible_module__"] + mode = task["action"].get("mode", None) + + if module not in self._modules and module not in self._modules_with_create: + return False + + if mode == "preserve" and module not in _modules_with_preserve: + return True + + if module in self._modules_with_create: + create = task["action"].get("create", self._modules_with_create[module]) + return create and mode is None + + # A file that doesn't exist cannot have a mode + if task["action"].get("state", None) == "absent": + return False + + # A symlink always has mode 0777 + if task["action"].get("state", None) == "link": + return False + + # Recurse on a directory does not allow for an uniform mode + if task["action"].get("recurse", None): + return False + + # The file module does not create anything when state==file (default) + if module == "file" and task["action"].get("state", "file") == "file": + return False + + # replace module is the only one that has a valid default preserve + # behavior, but we want to trigger rule if user used incorrect + # documentation and put 'preserve', which is not supported. 
+ if module == "replace" and mode is None: + return False + + return mode is None + + +if "pytest" in sys.modules: # noqa: C901 + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.testing import RunFromText # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("file", "expected"), + ( + pytest.param( + "examples/playbooks/rule-risky-file-permissions-pass.yml", 0, id="pass" + ), + pytest.param( + "examples/playbooks/rule-risky-file-permissions-fail.yml", + 11, + id="fails", + ), + ), + ) + def test_risky_file_permissions(file: str, expected: int) -> None: + """The ini_file module does not accept preserve mode.""" + collection = RulesCollection() + collection.register(MissingFilePermissionsRule()) + runner = RunFromText(collection) + results = runner.run(file) + assert len(results) == expected + for result in results: + assert result.tag == "risky-file-permissions" diff --git a/src/ansiblelint/rules/risky_octal.md b/src/ansiblelint/rules/risky_octal.md new file mode 100644 index 0000000..a2f22eb --- /dev/null +++ b/src/ansiblelint/rules/risky_octal.md @@ -0,0 +1,49 @@ +# risky-octal + +This rule checks that octal file permissions are strings that contain a leading +zero or are written in +[symbolic modes](https://www.gnu.org/software/findutils/manual/html_node/find_html/Symbolic-Modes.html), +such as `u+rwx` or `u=rw,g=r,o=r`. + +Using integers or octal values in YAML can result in unexpected behavior. For +example, the YAML loader interprets `0644` as the decimal number `420` but +putting `644` there will produce very different results. + +Modules that are checked: + +- [`ansible.builtin.assemble`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/assemble_module.html) +- [`ansible.builtin.copy`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/copy_module.html) +- [`ansible.builtin.file`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/file_module.html) +- [`ansible.builtin.replace`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/replace_module.html) +- [`ansible.builtin.template`](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/template_module.html) + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Unsafe example of declaring Numeric file permissions + ansible.builtin.file: + path: /etc/foo.conf + owner: foo + group: foo + mode: 644 +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Safe example of declaring Numeric file permissions (1st solution) + ansible.builtin.file: + path: /etc/foo.conf + owner: foo + group: foo + mode: "0644" # <- quoting and the leading zero will prevent surprises + # "0o644" is also a valid alternative. 
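    # An additional illustrative task: the symbolic modes mentioned in the
    # introduction above (for example u=rw,g=r,o=r) are plain strings, so
    # they also satisfy this rule.
    - name: Safe example of declaring symbolic file permissions (2nd solution)
      ansible.builtin.file:
        path: /etc/foo.conf
        owner: foo
        group: foo
        mode: u=rw,g=r,o=r # <- symbolic mode sidesteps the octal pitfall entirely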
+``` diff --git a/src/ansiblelint/rules/risky_octal.py b/src/ansiblelint/rules/risky_octal.py new file mode 100644 index 0000000..b0aadac --- /dev/null +++ b/src/ansiblelint/rules/risky_octal.py @@ -0,0 +1,193 @@ +"""Implementation of risky-octal rule.""" +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule, RulesCollection +from ansiblelint.runner import Runner + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class OctalPermissionsRule(AnsibleLintRule): + """Octal file permissions must contain leading zero or be a string.""" + + id = "risky-octal" + description = ( + "Numeric file permissions without leading zero can behave " + "in unexpected ways." + ) + link = "https://docs.ansible.com/ansible/latest/collections/ansible/builtin/file_module.html" + severity = "VERY_HIGH" + tags = ["formatting"] + version_added = "historic" + + _modules = [ + "assemble", + "copy", + "file", + "ini_file", + "lineinfile", + "replace", + "synchronize", + "template", + "unarchive", + ] + + @staticmethod + def is_invalid_permission(mode: int) -> bool: + """Check if permissions are valid. + + Sensible file permission modes don't have write bit set when read bit + is not set and don't have execute bit set when user execute bit is + not set. + + Also, user permissions are more generous than group permissions and + user and group permissions are more generous than world permissions. 
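        For example, 0o644 (rw-r--r--) passes these checks, while the decimal
        integer 644 is read as octal 0o1204, whose user bits end up write-only,
        so it is reported as invalid.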
+ """ + other_write_without_read = ( + mode % 8 and mode % 8 < 4 and not (mode % 8 == 1 and (mode >> 6) % 2 == 1) + ) + group_write_without_read = ( + (mode >> 3) % 8 + and (mode >> 3) % 8 < 4 + and not ((mode >> 3) % 8 == 1 and (mode >> 6) % 2 == 1) + ) + user_write_without_read = ( + (mode >> 6) % 8 and (mode >> 6) % 8 < 4 and not (mode >> 6) % 8 == 1 + ) + other_more_generous_than_group = mode % 8 > (mode >> 3) % 8 + other_more_generous_than_user = mode % 8 > (mode >> 6) % 8 + group_more_generous_than_user = (mode >> 3) % 8 > (mode >> 6) % 8 + + return bool( + other_write_without_read + or group_write_without_read + or user_write_without_read + or other_more_generous_than_group + or other_more_generous_than_user + or group_more_generous_than_user + ) + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["action"]["__ansible_module__"] in self._modules: + mode = task["action"].get("mode", None) + + if isinstance(mode, str): + return False + + if isinstance(mode, int) and self.is_invalid_permission(mode): + return f'`mode: {mode}` should have a string value with leading zero `mode: "0{mode:o}"` or use symbolic mode.' + return False + + +if "pytest" in sys.modules: + import pytest + + VALID_MODES = [ + 0o777, + 0o775, + 0o770, + 0o755, + 0o750, + 0o711, + 0o710, + 0o700, + 0o666, + 0o664, + 0o660, + 0o644, + 0o640, + 0o600, + 0o555, + 0o551, + 0o550, + 0o511, + 0o510, + 0o500, + 0o444, + 0o440, + 0o400, + ] + + INVALID_MODES = [ + 777, + 775, + 770, + 755, + 750, + 711, + 710, + 700, + 666, + 664, + 660, + 644, + 640, + 622, + 620, + 600, + 555, + 551, + 550, # 511 == 0o777, 510 == 0o776, 500 == 0o764 + 444, + 440, + 400, + ] + + @pytest.mark.parametrize( + ("file", "failures"), + ( + pytest.param("examples/playbooks/rule-risky-octal-pass.yml", 0, id="pass"), + pytest.param("examples/playbooks/rule-risky-octal-fail.yml", 4, id="fail"), + ), + ) + def test_octal(file: str, failures: int) -> None: + """Test that octal permissions are valid.""" + collection = RulesCollection() + collection.register(OctalPermissionsRule()) + results = Runner(file, rules=collection).run() + + assert len(results) == failures + for result in results: + assert result.rule.id == "risky-octal" + + def test_octal_valid_modes() -> None: + """Test that octal modes are valid.""" + rule = OctalPermissionsRule() + for mode in VALID_MODES: + assert not rule.is_invalid_permission( + mode + ), f"0o{mode:o} should be a valid mode" + + def test_octal_invalid_modes() -> None: + """Test that octal modes are invalid.""" + rule = OctalPermissionsRule() + for mode in INVALID_MODES: + assert rule.is_invalid_permission( + mode + ), f"{mode:d} should be an invalid mode" diff --git a/src/ansiblelint/rules/risky_shell_pipe.md b/src/ansiblelint/rules/risky_shell_pipe.md new file mode 100644 index 0000000..0c222a9 --- /dev/null +++ b/src/ansiblelint/rules/risky_shell_pipe.md @@ -0,0 +1,35 @@ +# risky-shell-pipe + +This rule checks for the bash `pipefail` option with the Ansible `shell` module. + +You should always set `pipefail` when piping output from a command to another. +The return status of a pipeline is the exit status of the command. +The `pipefail` option ensures that tasks fail as expected if the first command fails. 
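Because `pipefail` is a bash option, it may also be necessary to point the `shell` module at bash on hosts whose default shell is something else. A minimal illustrative sketch (the interpreter path is an assumption):

```yaml
- name: Pipeline with pipefail and an explicit bash executable
  ansible.builtin.shell: set -o pipefail && false | cat
  args:
    executable: /bin/bash # <- assumed location of bash on the managed host
```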
+ +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + tasks: + - name: Pipeline without pipefail + shell: false | cat +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + become: no + tasks: + - name: Pipeline with pipefail + shell: set -o pipefail && false | cat + + - name: Pipeline with pipefail, multi-line + shell: | + set -o pipefail # <-- adding this will prevent surprises + false | cat +``` diff --git a/src/ansiblelint/rules/risky_shell_pipe.py b/src/ansiblelint/rules/risky_shell_pipe.py new file mode 100644 index 0000000..f766cf1 --- /dev/null +++ b/src/ansiblelint/rules/risky_shell_pipe.py @@ -0,0 +1,53 @@ +"""Implementation of risky-shell-pipe rule.""" +from __future__ import annotations + +import re +from typing import TYPE_CHECKING, Any + +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.utils import convert_to_boolean + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class ShellWithoutPipefail(AnsibleLintRule): + """Shells that use pipes should set the pipefail option.""" + + id = "risky-shell-pipe" + description = ( + "Without the pipefail option set, a shell command that " + "implements a pipeline can fail and still return 0. If " + "any part of the pipeline other than the terminal command " + "fails, the whole pipeline will still return 0, which may " + "be considered a success by Ansible. " + "Pipefail is available in the bash shell." + ) + severity = "MEDIUM" + tags = ["command-shell"] + version_added = "v4.1.0" + + _pipefail_re = re.compile(r"^\s*set.*[+-][A-Za-z]*o\s*pipefail", re.M) + _pipe_re = re.compile(r"(?<!\|)\|(?!\|)") + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + if task["__ansible_action_type__"] != "task": + return False + + if task["action"]["__ansible_module__"] != "shell": + return False + + if task.get("ignore_errors"): + return False + + jinja_stripped_cmd = self.unjinja( + " ".join(task["action"].get("__ansible_arguments__", [])) + ) + + return bool( + self._pipe_re.search(jinja_stripped_cmd) + and not self._pipefail_re.search(jinja_stripped_cmd) + and not convert_to_boolean(task["action"].get("ignore_errors", False)) + ) diff --git a/src/ansiblelint/rules/role_name.md b/src/ansiblelint/rules/role_name.md new file mode 100644 index 0000000..28aa8b8 --- /dev/null +++ b/src/ansiblelint/rules/role_name.md @@ -0,0 +1,36 @@ +# role-name + +This rule checks role names to ensure they conform with requirements. + +Role names must contain only lowercase alphanumeric characters and the underscore `_` character. +Role names must also start with an alphabetic character. + +For more information see the [roles directory](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections_structure.html#roles-directory) topic in Ansible documentation. + +`role-name[path]` message tells you to avoid using paths when importing roles. +You should only rely on Ansible's ability to find the role and refer to them +using fully qualified names. + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + roles: + - 1myrole # <- Does not start with an alphabetic character. + - myrole2[*^ # <- Contains invalid special characters. + - myRole_3 # <- Contains uppercase alphabetic characters. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + roles: + - myrole1 # <- Starts with an alphabetic character. + - myrole2 # <- Contains only alphanumeric characters. 
+ - myrole_3 # <- Contains only lowercase alphabetic characters. +``` diff --git a/src/ansiblelint/rules/role_name.py b/src/ansiblelint/rules/role_name.py new file mode 100644 index 0000000..12989c8 --- /dev/null +++ b/src/ansiblelint/rules/role_name.py @@ -0,0 +1,160 @@ +"""Implementation of role-name rule.""" +# Copyright (c) 2020 Gael Chamoulaud <gchamoul@redhat.com> +# Copyright (c) 2020 Sorin Sbarnea <ssbarnea@redhat.com> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +from __future__ import annotations + +import re +import sys +from functools import cache +from pathlib import Path +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import ROLE_IMPORT_ACTION_NAMES +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.utils import parse_yaml_from_file + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + + +ROLE_NAME_REGEX = re.compile(r"^[a-z][a-z0-9_]*$") + + +def _remove_prefix(text: str, prefix: str) -> str: + return re.sub(rf"^{re.escape(prefix)}", "", text) + + +@cache +def _match_role_name_regex(role_name: str) -> bool: + return ROLE_NAME_REGEX.match(role_name) is not None + + +class RoleNames(AnsibleLintRule): + # Unable to use f-strings due to flake8 bug with AST parsing + """Role name {0} does not match ``^[a-z][a-z0-9_]*$`` pattern.""" + + id = "role-name" + description = ( + "Role names are now limited to contain only lowercase alphanumeric " + "characters, plus underline and start with an alpha character." + ) + link = "https://docs.ansible.com/ansible/devel/dev_guide/developing_collections_structure.html#roles-directory" + severity = "HIGH" + tags = ["deprecations", "metadata"] + version_added = "v6.8.5" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + results = [] + if task["action"]["__ansible_module__"] in ROLE_IMPORT_ACTION_NAMES: + name = task["action"].get("name", "") + if "/" in name: + results.append( + self.create_matcherror( + f"Avoid using paths when importing roles. 
({name})", + filename=file, + linenumber=task["action"].get("__line__", task["__line__"]), + tag=f"{self.id}[path]", + ) + ) + return results + + def matchdir(self, lintable: Lintable) -> list[MatchError]: + return self.matchyaml(lintable) + + def matchyaml(self, file: Lintable) -> list[MatchError]: + result: list[MatchError] = [] + + if file.kind not in ("meta", "role", "playbook"): + return result + + if file.kind == "playbook": + for play in file.data: + if "roles" in play: + line = play["__line__"] + for role in play["roles"]: + if isinstance(role, dict): + line = role["__line__"] + role_name = role["role"] + elif isinstance(role, str): + role_name = role + if "/" in role_name: + result.append( + self.create_matcherror( + f"Avoid using paths when importing roles. ({role_name})", + filename=file, + linenumber=line, + tag=f"{self.id}[path]", + ) + ) + return result + + if file.kind == "role": + role_name = self._infer_role_name( + meta=file.path / "meta" / "main.yml", default=file.path.name + ) + else: + role_name = self._infer_role_name( + meta=file.path, default=file.path.resolve().parents[1].name + ) + + role_name = _remove_prefix(role_name, "ansible-role-") + if role_name and not _match_role_name_regex(role_name): + result.append( + self.create_matcherror( + filename=file, + message=self.shortdesc.format(role_name), + ) + ) + return result + + @staticmethod + def _infer_role_name(meta: Path, default: str) -> str: + if meta.is_file(): + meta_data = parse_yaml_from_file(str(meta)) + if meta_data: + try: + return str(meta_data["galaxy_info"]["role_name"]) + except KeyError: + pass + return default + + +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("test_file", "failure"), + (pytest.param("examples/playbooks/rule-role-name-path.yml", 3, id="fail"),), + ) + def test_role_name_path( + default_rules_collection: RulesCollection, test_file: str, failure: int + ) -> None: + """Test rule matches.""" + results = Runner(test_file, rules=default_rules_collection).run() + for result in results: + assert result.tag == "role-name[path]" + assert len(results) == failure diff --git a/src/ansiblelint/rules/run_once.md b/src/ansiblelint/rules/run_once.md new file mode 100644 index 0000000..b7df02c --- /dev/null +++ b/src/ansiblelint/rules/run_once.md @@ -0,0 +1,63 @@ +# run-once + +This rule warns against the use of `run_once` when the `strategy` is set to +`free`. + +This rule can produce the following messages: + +- `run-once[play]`: Play uses `strategy: free`. +- `run-once[task]`: Using `run_once` may behave differently if the `strategy` is + set to `free`. + +For more information see the following topics in Ansible documentation: + +- [free strategy](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/free_strategy.html#free-strategy) +- [selecting a strategy](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_strategies.html#selecting-a-strategy) +- [run_once(playbook keyword) more info](https://docs.ansible.com/ansible/latest/reference_appendices/playbooks_keywords.html) + +!!! warning + + This rule will always trigger regardless of the value configured inside the 'strategy' field. That is because the effective value used at runtime can be different than the value inside the file. For example, ansible command line arguments can alter it. 
+ +It is perfectly fine to add `# noqa: run-once[task]` to mark the warning as +acknowledged and ignored. + +## Problematic Code + +```yaml +--- +- name: "Example with run_once" + hosts: all + strategy: free # <-- avoid use of strategy as free + gather_facts: false + tasks: + - name: Task with run_once + ansible.builtin.debug: + msg: "Test" + run_once: true # <-- avoid use of strategy as free at play level when using run_once at task level +``` + +## Correct Code + +```yaml +- name: "Example without run_once" + hosts: all + gather_facts: false + tasks: + - name: Task without run_once + ansible.builtin.debug: + msg: "Test" +``` + +```yaml +- name: "Example of using run_once with strategy other than free" + hosts: all + strategy: linear + # strategy: free # noqa: run-once[play] (if using strategy: free can skip it this way) + gather_facts: false + tasks: # <-- use noqa to disable rule violations for specific tasks + - name: Task with run_once # noqa: run-once[task] + ansible.builtin.debug: + msg: "Test" + run_once: true +``` diff --git a/src/ansiblelint/rules/run_once.py b/src/ansiblelint/rules/run_once.py new file mode 100644 index 0000000..f8a059e --- /dev/null +++ b/src/ansiblelint/rules/run_once.py @@ -0,0 +1,87 @@ +"""Optional Ansible-lint rule to warn use of run_once with strategy free.""" +from __future__ import annotations + +import sys +from typing import TYPE_CHECKING, Any + +from ansiblelint.constants import LINE_NUMBER_KEY +from ansiblelint.errors import MatchError +from ansiblelint.rules import AnsibleLintRule + +if TYPE_CHECKING: + from ansiblelint.file_utils import Lintable + + +class RunOnce(AnsibleLintRule): + """Run once should use strategy other than free.""" + + id = "run-once" + link = "https://docs.ansible.com/ansible/latest/reference_appendices/playbooks_keywords.html" + description = "When using run_once, we should avoid using strategy as free." 
+ + tags = ["idiom"] + severity = "MEDIUM" + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + """Return matches found for a specific playbook.""" + # If the Play uses the 'strategy' and it's value is set to free + + if not file or file.kind != "playbook" or not data: + return [] + + strategy = data.get("strategy", None) + run_once = data.get("run_once", False) + if (not strategy and not run_once) or strategy != "free": + return [] + return [ + self.create_matcherror( + message="Play uses strategy: free", + filename=file, + tag=f"{self.id}[play]", + # pylint: disable=protected-access + linenumber=strategy._line_number, + ) + ] + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + """Return matches for a task.""" + if not file or file.kind != "playbook": + return [] + + run_once = task.get("run_once", False) + if not run_once: + return [] + return [ + self.create_matcherror( + message="Using run_once may behave differently if strategy is set to free.", + filename=file, + tag=f"{self.id}[task]", + linenumber=task[LINE_NUMBER_KEY], + ) + ] + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.rules import RulesCollection # pylint: disable=ungrouped-imports + from ansiblelint.runner import Runner # pylint: disable=ungrouped-imports + + @pytest.mark.parametrize( + ("test_file", "failure"), + ( + pytest.param("examples/playbooks/run-once-pass.yml", 0, id="pass"), + pytest.param("examples/playbooks/run-once-fail.yml", 2, id="fail"), + ), + ) + def test_run_once( + default_rules_collection: RulesCollection, test_file: str, failure: int + ) -> None: + """Test rule matches.""" + results = Runner(test_file, rules=default_rules_collection).run() + for result in results: + assert result.rule.id == RunOnce().id + assert len(results) == failure diff --git a/src/ansiblelint/rules/schema.md b/src/ansiblelint/rules/schema.md new file mode 100644 index 0000000..9327bca --- /dev/null +++ b/src/ansiblelint/rules/schema.md @@ -0,0 +1,80 @@ +# schema + +The `schema` rule validates Ansible metadata files against JSON schemas. These +schemas ensure the compatibility of Ansible syntax content across versions. + +This `schema` rule is **mandatory**. You cannot use inline `noqa` comments to +ignore it. + +Ansible-lint validates the `schema` rule before processing other rules. This +prevents unexpected syntax from triggering multiple rule violations. + +## Validated schema + +Ansible-lint currently validates several schemas that are maintained in separate +projects and updated independently to ansible-lint. + +> Report bugs related to schema in their respective repository and not in the +> ansible-lint project. 
+ +Maintained in the [ansible-lint](https://github.com/ansible/ansible-lint) +project: + +- `schema[ansible-lint-config]` validates + [ansible-lint configuration](https://github.com/ansible/ansible-lint/blob/main/src/ansiblelint/schemas/ansible-lint-config.json) + +Maintained in the +[ansible-navigator](https://github.com/ansible/ansible-navigator) project: + +- `schema[ansible-navigator]` validates + [ansible-navigator configuration](https://github.com/ansible/ansible-navigator/blob/main/src/ansible_navigator/data/ansible-navigator.json) + +- `schema[arg_specs]` validates + [module argument specs](https://docs.ansible.com/ansible/latest/dev_guide/developing_program_flow_modules.html#argument-spec) +- `schema[execution-environment]` validates + [execution environments](https://docs.ansible.com/automation-controller/latest/html/userguide/execution_environments.html) +- `schema[galaxy]` validates + [collection metadata](https://docs.ansible.com/ansible/latest/dev_guide/collections_galaxy_meta.html). +- `schema[inventory]` validates + [inventory files](https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html) + that match `inventory/*.yml`. +- `schema[meta-runtime]` validates + [runtime information](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections_structure.html#meta-directory-and-runtime-yml) + that matches `meta/runtime.yml` +- `schema[meta]` validates metadata for roles that match `meta/main.yml`. See + [role-dependencies](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_reuse_roles.html#role-dependencies) + or + [role/metadata.py](https://github.com/ansible/ansible/blob/devel/lib/ansible/playbook/role/metadata.py#L79)) + for details. +- `schema[playbook]` validates Ansible playbooks. +- `schema[requirements]` validates Ansible + [requirements](https://docs.ansible.com/ansible/latest/galaxy/user_guide.html#install-multiple-collections-with-a-requirements-file) + files that match `requirements.yml`. +- `schema[tasks]` validates Ansible task files that match `tasks/**/*.yml`. +- `schema[vars]` validates Ansible + [variables](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html) + that match `vars/*.yml` and `defaults/*.yml`. + +## schema[meta] + +For `meta/main.yml` files, Ansible-lint requires a `galaxy_info.standalone` +property that clarifies if a role is an old standalone one or a new one, +collection based: + +```yaml +galaxy_info: + standalone: true # <-- this is a standalone role (not part of a collection) +``` + +Ansible-lint requires the `standalone` key to avoid confusion and provide more +specific error messages. For example, the `meta` schema will require some +properties only for standalone roles or prevent the use of some properties that +are not supported by collections. + +You cannot use an empty `meta/main.yml` file or use only comments in the +`meta/main.yml` file. + +## schema[moves] + +These errors usually look like "foo was moved to bar in 2.10" and indicate +module moves between Ansible versions. 
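+
+For example, a task that still uses one of the moved `with_*` lookups, such as
+`with_flattened`, is reported with the `schema[moves]` tag. The snippet below
+is only an illustrative sketch based on the moves this rule knows about, not an
+example taken from the Ansible documentation:
+
+```yaml
+- name: Print each item of a flattened list
+  ansible.builtin.debug:
+    msg: "{{ item }}"
+  with_flattened: # <-- schema[moves]: was moved to with_community.general.flattened in 2.10
+    - [1, 2]
+    - [3, 4]
+```
+
+Replacing `with_flattened` with `with_community.general.flattened` (or an
+equivalent `loop`) resolves the error.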
diff --git a/src/ansiblelint/rules/schema.py b/src/ansiblelint/rules/schema.py new file mode 100644 index 0000000..26ba1d9 --- /dev/null +++ b/src/ansiblelint/rules/schema.py @@ -0,0 +1,257 @@ +"""Rule definition for JSON Schema Validations.""" +from __future__ import annotations + +import logging +import sys +from typing import Any + +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.schemas import JSON_SCHEMAS, validate_file_schema + +_logger = logging.getLogger(__name__) + + +DESCRIPTION_MD = """ Returned errors will not include exact line numbers, but they will mention +the schema name being used as a tag, like ``schema[playbook]``, +``schema[tasks]``. + +This rule is not skippable and stops further processing of the file. + +If incorrect schema was picked, you might want to either: + +* move the file to standard location, so its file is detected correctly. +* use ``kinds:`` option in linter config to help it pick correct file type. +""" + +pre_checks = { + "task": { + "with_flattened": { + "msg": "with_flattened was moved to with_community.general.flattened in 2.10", + "tag": "moves", + }, + "with_filetree": { + "msg": "with_filetree was moved to with_community.general.flattened in 2.10", + "tag": "moves", + }, + "with_cartesian": { + "msg": "with_cartesian was moved to with_community.general.flattened in 2.10", + "tag": "moves", + }, + } +} + + +class ValidateSchemaRule(AnsibleLintRule): + """Perform JSON Schema Validation for known lintable kinds.""" + + description = DESCRIPTION_MD + + id = "schema" + severity = "VERY_HIGH" + tags = ["core"] + version_added = "v6.1.0" + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str | MatchError | list[MatchError]: + result = [] + for key in pre_checks["task"]: + if key in task: + msg = pre_checks["task"][key]["msg"] + tag = pre_checks["task"][key]["tag"] + result.append( + MatchError( + message=msg, + filename=file, + rule=ValidateSchemaRule(), + details=ValidateSchemaRule.description, + tag=f"schema[{tag}]", + ) + ) + return result + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Return JSON validation errors found as a list of MatchError(s).""" + result = [] + if file.kind not in JSON_SCHEMAS: + return [] + + errors = validate_file_schema(file) + if errors: + if errors[0].startswith("Failed to load YAML file"): + _logger.debug( + "Ignored failure to load %s for schema validation, as !vault may cause it." 
+ ) + return [] + + result.append( + MatchError( + message=errors[0], + filename=file, + rule=ValidateSchemaRule(), + details=ValidateSchemaRule.description, + tag=f"schema[{file.kind}]", + ) + ) + return result + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + # pylint: disable=ungrouped-imports + from ansiblelint.config import options + from ansiblelint.rules import RulesCollection + from ansiblelint.runner import Runner + + @pytest.mark.parametrize( + ("file", "expected_kind", "expected"), + ( + ( + "examples/collection/galaxy.yml", + "galaxy", + ["'GPL' is not one of"], + ), + ( + "examples/roles/invalid_requirements_schema/meta/requirements.yml", + "requirements", + ["{'foo': 'bar'} is not valid under any of the given schemas"], + ), + ( + "examples/roles/invalid_meta_schema/meta/main.yml", + "meta", + ["False is not of type 'string'"], + ), + ( + "examples/playbooks/vars/invalid_vars_schema.yml", + "vars", + ["'123' does not match any of the regexes"], + ), + ( + "examples/execution-environment.yml", + "execution-environment", + [], + ), + ( + "examples/ee_broken/execution-environment.yml", + "execution-environment", + ["Additional properties are not allowed ('foo' was unexpected)"], + ), + ("examples/meta/runtime.yml", "meta-runtime", []), + ( + "examples/broken_collection_meta_runtime/meta/runtime.yml", + "meta-runtime", + ["Additional properties are not allowed ('foo' was unexpected)"], + ), + ( + "examples/inventory/production.yml", + "inventory", + [], + ), + ( + "examples/inventory/broken_dev_inventory.yml", + "inventory", + ["Additional properties are not allowed ('foo' was unexpected)"], + ), + ( + ".ansible-lint", + "ansible-lint-config", + [], + ), + ( + "examples/.config/ansible-lint.yml", + "ansible-lint-config", + [], + ), + ( + "examples/broken/.ansible-lint", + "ansible-lint-config", + ["Additional properties are not allowed ('foo' was unexpected)"], + ), + ( + "examples/ansible-navigator.yml", + "ansible-navigator-config", + [], + ), + ( + "examples/broken/ansible-navigator.yml", + "ansible-navigator-config", + ["Additional properties are not allowed ('ansible' was unexpected)"], + ), + ( + "examples/roles/hello/meta/argument_specs.yml", + "arg_specs", + [], + ), + ( + "examples/roles/broken_argument_specs/meta/argument_specs.yml", + "arg_specs", + ["Additional properties are not allowed ('foo' was unexpected)"], + ), + ), + ids=( + # "playbook-fail", + "galaxy", + "requirements", + "meta", + "vars", + "ee", + "ee-broken", + "meta-runtime", + "meta-runtime-broken", + "inventory", + "inventory-broken", + "lint-config", + "lint-config2", + "lint-config-broken", + "navigator", + "navigator-broken", + "argspecs", + "argspecs-broken", + ), + ) + def test_schema(file: str, expected_kind: str, expected: list[str]) -> None: + """Validate parsing of ansible output.""" + lintable = Lintable(file) + assert lintable.kind == expected_kind + + rules = RulesCollection(options=options) + rules.register(ValidateSchemaRule()) + results = Runner(lintable, rules=rules).run() + + assert len(results) == len(expected), results + for idx, result in enumerate(results): + assert result.filename.endswith(file) + assert expected[idx] in result.message + assert result.tag == f"schema[{expected_kind}]" + + @pytest.mark.parametrize( + ("file", "expected_kind", "expected_tag", "count"), + ( + pytest.param( + "examples/playbooks/rule-syntax-moves.yml", + "playbook", + "schema[moves]", + 3, + id="playbook", + ), + ), + ) + 
def test_schema_moves( + file: str, expected_kind: str, expected_tag: str, count: int + ) -> None: + """Validate ability to detect schema[moves].""" + lintable = Lintable(file) + assert lintable.kind == expected_kind + + rules = RulesCollection(options=options) + rules.register(ValidateSchemaRule()) + results = Runner(lintable, rules=rules).run() + + assert len(results) == count, results + for result in results: + assert result.filename.endswith(file) + assert result.tag == expected_tag diff --git a/src/ansiblelint/rules/syntax_check.md b/src/ansiblelint/rules/syntax_check.md new file mode 100644 index 0000000..e8197a5 --- /dev/null +++ b/src/ansiblelint/rules/syntax_check.md @@ -0,0 +1,45 @@ +# syntax-check + +Our linter runs `ansible-playbook --syntax-check` on all playbooks, and if any +of these reports a syntax error, this stops any further processing of these +files. + +This error **cannot be disabled** due to being a prerequisite for other steps. +You can exclude these files from linting, but it is better to make sure they can +be loaded by Ansible. This is often achieved by editing the inventory file +and/or `ansible.cfg` so ansible can load required variables. + +If undefined variables cause the failure, you can use the jinja `default()` +filter to provide fallback values, like in the example below. + +This rule is among the few `unskippable` rules that cannot be added to +`skip_list` or `warn_list`. One possible workaround is to add the entire file to +the `exclude_paths`. This is a valid approach for special cases, like testing +fixtures that are invalid on purpose. + +One of the most common sources of errors is a failure to assert the presence of +various variables at the beginning of the playbook. + +This rule can produce messages like below: + +- `syntax-check[empty-playbook]` is raised when a playbook file has no content. + +## Problematic code + +```yaml +--- +- name: + Bad use of variable inside hosts block (wrong assumption of it being + defined) + hosts: "{{ my_hosts }}" + tasks: [] +``` + +## Correct code + +```yaml +--- +- name: Good use of variable inside hosts, without assumptions + hosts: "{{ my_hosts | default([]) }}" + tasks: [] +``` diff --git a/src/ansiblelint/rules/syntax_check.py b/src/ansiblelint/rules/syntax_check.py new file mode 100644 index 0000000..cec94e6 --- /dev/null +++ b/src/ansiblelint/rules/syntax_check.py @@ -0,0 +1,240 @@ +"""Rule definition for ansible syntax check.""" +from __future__ import annotations + +import json +import re +import subprocess +import sys +from dataclasses import dataclass +from typing import Any + +from ansiblelint._internal.rules import BaseRule, RuntimeErrorRule +from ansiblelint.app import get_app +from ansiblelint.config import options +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.logger import timed_info +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.text import strip_ansi_escape + + +@dataclass +class KnownError: + """Class that tracks result of linting.""" + + tag: str + regex: re.Pattern[str] + + +OUTPUT_PATTERNS = ( + KnownError( + tag="missing-file", + regex=re.compile( + # do not use <filename> capture group for this because we want to report original file, not the missing target one + r"(?P<title>Unable to retrieve file contents)\n(?P<details>Could not find or access '(?P<value>.*)'[^\n]*)", + re.MULTILINE | re.S | re.DOTALL, + ), + ), + KnownError( + tag="specific", + regex=re.compile( + r"^ERROR! 
(?P<title>[^\n]*)\n\nThe error appears to be in '(?P<filename>[\w\/\.\-]+)': line (?P<line>\d+), column (?P<column>\d+)", + re.MULTILINE | re.S | re.DOTALL, + ), + ), + KnownError( + tag="empty-playbook", + regex=re.compile( + "Empty playbook, nothing to do", re.MULTILINE | re.S | re.DOTALL + ), + ), + KnownError( + tag="malformed", + regex=re.compile( + "^ERROR! (?P<title>A malformed block was encountered while loading a block[^\n]*)", + re.MULTILINE | re.S | re.DOTALL, + ), + ), +) + + +class AnsibleSyntaxCheckRule(AnsibleLintRule): + """Ansible syntax check failed.""" + + id = "syntax-check" + severity = "VERY_HIGH" + tags = ["core", "unskippable"] + version_added = "v5.0.0" + _order = 0 + + @staticmethod + # pylint: disable=too-many-locals,too-many-branches + def _get_ansible_syntax_check_matches(lintable: Lintable) -> list[MatchError]: + """Run ansible syntax check and return a list of MatchError(s).""" + default_rule: BaseRule = AnsibleSyntaxCheckRule() + results = [] + if lintable.kind not in ("playbook", "role"): + return [] + + with timed_info( + "Executing syntax check on %s %s", lintable.kind, lintable.path + ): + # To avoid noisy warnings we pass localhost as current inventory: + # [WARNING]: No inventory was parsed, only implicit localhost is available + # [WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all' + if lintable.kind == "playbook": + cmd = [ + "ansible-playbook", + "-i", + "localhost,", + "--syntax-check", + str(lintable.path.expanduser()), + ] + else: # role + cmd = [ + "ansible", + "localhost", + "--syntax-check", + "--module-name=include_role", + "--args", + f"name={str(lintable.path.expanduser())}", + ] + if options.extra_vars: + cmd.extend(["--extra-vars", json.dumps(options.extra_vars)]) + + # To reduce noisy warnings like + # CryptographyDeprecationWarning: Blowfish has been deprecated + # https://github.com/paramiko/paramiko/issues/2038 + env = get_app().runtime.environ.copy() + env["PYTHONWARNINGS"] = "ignore" + + run = subprocess.run( + cmd, + stdin=subprocess.PIPE, + capture_output=True, + shell=False, # needed when command is a list + text=True, + check=False, + env=env, + ) + + if run.returncode != 0: + message = None + filename = lintable + linenumber = 1 + column = None + tag = None + + stderr = strip_ansi_escape(run.stderr) + stdout = strip_ansi_escape(run.stdout) + if stderr: + details = stderr + if stdout: + details += "\n" + stdout + else: + details = stdout + + for pattern in OUTPUT_PATTERNS: + rule = default_rule + match = re.search(pattern.regex, stderr) + if match: + groups = match.groupdict() + title = groups.get("title", match.group(0)) + details = groups.get("details", "") + linenumber = int(groups.get("line", 1)) + + if "filename" in groups: + filename = Lintable(groups["filename"]) + else: + filename = lintable + column = int(groups.get("column", 1)) + results.append( + MatchError( + message=title, + filename=filename, + linenumber=linenumber, + column=column, + rule=rule, + details=details, + tag=f"{rule.id}[{pattern.tag}]", + ) + ) + + if not results: + rule = RuntimeErrorRule() + message = ( + f"Unexpected error code {run.returncode} from " + f"execution of: {' '.join(cmd)}" + ) + results.append( + MatchError( + message=message, + filename=filename, + linenumber=linenumber, + column=column, + rule=rule, + details=details, + tag=tag, + ) + ) + + return results + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: 
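+    # The tests below run the patterns above against intentionally broken
+    # example playbooks and roles shipped in the examples/ directory.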
+ + def test_get_ansible_syntax_check_matches() -> None: + """Validate parsing of ansible output.""" + lintable = Lintable( + "examples/playbooks/conflicting_action.yml", kind="playbook" + ) + # pylint: disable=protected-access + result = AnsibleSyntaxCheckRule._get_ansible_syntax_check_matches(lintable) + assert result[0].linenumber == 4 + assert result[0].column == 7 + assert ( + result[0].message + == "conflicting action statements: ansible.builtin.debug, ansible.builtin.command" + ) + # We internally convert absolute paths returned by ansible into paths + # relative to current directory. + assert result[0].filename.endswith("/conflicting_action.yml") + assert len(result) == 1 + + def test_empty_playbook() -> None: + """Validate detection of empty-playbook.""" + lintable = Lintable("examples/playbooks/empty_playbook.yml", kind="playbook") + # pylint: disable=protected-access + result = AnsibleSyntaxCheckRule._get_ansible_syntax_check_matches(lintable) + assert result[0].linenumber == 1 + # We internally convert absolute paths returned by ansible into paths + # relative to current directory. + assert result[0].filename.endswith("/empty_playbook.yml") + assert result[0].tag == "syntax-check[empty-playbook]" + assert result[0].message == "Empty playbook, nothing to do" + assert len(result) == 1 + + def test_extra_vars_passed_to_command(config_options: Any) -> None: + """Validate `extra-vars` are passed to syntax check command.""" + config_options.extra_vars = { + "foo": "bar", + "complex_variable": ":{;\t$()", + } + lintable = Lintable("examples/playbooks/extra_vars.yml", kind="playbook") + + # pylint: disable=protected-access + result = AnsibleSyntaxCheckRule._get_ansible_syntax_check_matches(lintable) + + assert not result + + def test_syntax_check_role() -> None: + """Validate syntax check of a broken role.""" + lintable = Lintable("examples/playbooks/roles/invalid_due_syntax", kind="role") + # pylint: disable=protected-access + result = AnsibleSyntaxCheckRule._get_ansible_syntax_check_matches(lintable) + assert len(result) == 1, result + assert result[0].linenumber == 2 + assert result[0].filename == "examples/roles/invalid_due_syntax/tasks/main.yml" + assert result[0].tag == "syntax-check[specific]" + assert result[0].message == "no module/action detected in task." diff --git a/src/ansiblelint/rules/var_naming.md b/src/ansiblelint/rules/var_naming.md new file mode 100644 index 0000000..60c069d --- /dev/null +++ b/src/ansiblelint/rules/var_naming.md @@ -0,0 +1,47 @@ +# var-naming + +This rule checks variable names to ensure they conform with requirements. + +Variable names must contain only lowercase alphanumeric characters and the +underscore `_` character. Variable names must also start with either an +alphabetic or underscore `_` character. + +For more information see the [creating valid variable names][var-names] topic in +Ansible documentation and [Naming things (Good Practices for Ansible)][cop]. + +## Settings + +This rule behavior can be changed by altering the below settings: + +```yaml +# .ansible-lint +var_naming_pattern: "^[a-z_][a-z0-9_]*$" +``` + +## Problematic Code + +```yaml +--- +- name: Example playbook + hosts: localhost + vars: + CamelCase: true # <- Contains a mix of lowercase and uppercase characters. + ALL_CAPS: bar # <- Contains only uppercase characters. + v@r!able: baz # <- Contains special characters. +``` + +## Correct Code + +```yaml +--- +- name: Example playbook + hosts: localhost + vars: + lowercase: true # <- Contains only lowercase characters. 
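+    number_2: qux # <- Digits are fine after the first character.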
+ no_caps: bar # <- Does not contains uppercase characters. + variable: baz # <- Does not contain special characters. +``` + +[cop]: https://redhat-cop.github.io/automation-good-practices/#_naming_things +[var-names]: + https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#creating-valid-variable-names diff --git a/src/ansiblelint/rules/var_naming.py b/src/ansiblelint/rules/var_naming.py new file mode 100644 index 0000000..945a95d --- /dev/null +++ b/src/ansiblelint/rules/var_naming.py @@ -0,0 +1,242 @@ +"""Implementation of var-naming rule.""" +from __future__ import annotations + +import keyword +import re +import sys +from typing import TYPE_CHECKING, Any + +from ansible.parsing.yaml.objects import AnsibleUnicode + +from ansiblelint.config import options +from ansiblelint.constants import LINE_NUMBER_KEY, SUCCESS_RC +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule, RulesCollection +from ansiblelint.runner import Runner +from ansiblelint.skip_utils import get_rule_skips_from_line +from ansiblelint.utils import parse_yaml_from_file + +if TYPE_CHECKING: + from ansiblelint.errors import MatchError + +# Should raise var-naming at line [2, 6]. +FAIL_VARS = """--- +CamelCaseIsBad: false # invalid +this_is_valid: # valid because content is a dict, not a variable + CamelCase: ... + ALL_CAPS: ... +ALL_CAPS_ARE_BAD_TOO: ... # invalid +"{{ 'test_' }}var": "value" # valid +CamelCaseButErrorIgnored: true # noqa: var-naming +""" + + +# properties/parameters are prefixed and postfixed with `__` +def is_property(k: str) -> bool: + """Check if key is a property.""" + return k.startswith("__") and k.endswith("__") + + +class VariableNamingRule(AnsibleLintRule): + """All variables should be named using only lowercase and underscores.""" + + id = "var-naming" + severity = "MEDIUM" + tags = ["idiom"] + version_added = "v5.0.10" + needs_raw_task = True + re_pattern = re.compile(options.var_naming_pattern or "^[a-z_][a-z0-9_]*$") + + def is_invalid_variable_name(self, ident: str) -> bool: + """Check if variable name is using right pattern.""" + # Based on https://github.com/ansible/ansible/blob/devel/lib/ansible/utils/vars.py#L235 + if not isinstance(ident, str): + return False + + try: + ident.encode("ascii") + except UnicodeEncodeError: + return False + + if keyword.iskeyword(ident): + return False + + # We want to allow use of jinja2 templating for variable names + if "{{" in ident: + return False + + # previous tests should not be triggered as they would have raised a + # syntax-error when we loaded the files but we keep them here as a + # safety measure. 
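+        # Anything that was not filtered out above is flagged when it does not
+        # match the configured var_naming_pattern (default: ^[a-z_][a-z0-9_]*$).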
+ return not bool(self.re_pattern.match(ident)) + + def matchplay(self, file: Lintable, data: dict[str, Any]) -> list[MatchError]: + """Return matches found for a specific playbook.""" + results: list[MatchError] = [] + raw_results: list[MatchError] = [] + + if not data or file.kind not in ("tasks", "handlers", "playbook", "vars"): + return results + # If the Play uses the 'vars' section to set variables + our_vars = data.get("vars", {}) + for key in our_vars.keys(): + if self.is_invalid_variable_name(key): + raw_results.append( + self.create_matcherror( + filename=file, + linenumber=key.ansible_pos[1] + if isinstance(key, AnsibleUnicode) + else our_vars[LINE_NUMBER_KEY], + message="Play defines variable '" + + key + + "' within 'vars' section that violates variable naming standards", + tag=f"var-naming[{key}]", + ) + ) + if raw_results: + lines = file.content.splitlines() + for match in raw_results: + # linenumber starts with 1, not zero + skip_list = get_rule_skips_from_line(lines[match.linenumber - 1]) + if match.rule.id not in skip_list and match.tag not in skip_list: + results.append(match) + + return results + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> list[MatchError]: + """Return matches for task based variables.""" + results = [] + # If the task uses the 'vars' section to set variables + our_vars = task.get("vars", {}) + for key in our_vars.keys(): + if self.is_invalid_variable_name(key): + results.append( + self.create_matcherror( + filename=file, + linenumber=our_vars[LINE_NUMBER_KEY], + message=f"Task defines variable within 'vars' section that violates variable naming standards: {key}", + tag=f"var-naming[{key}]", + ) + ) + + # If the task uses the 'set_fact' module + ansible_module = task["action"]["__ansible_module__"] + if ansible_module == "set_fact": + for key in filter( + lambda x: isinstance(x, str) and not x.startswith("__"), + task["action"].keys(), + ): + if self.is_invalid_variable_name(key): + results.append( + self.create_matcherror( + filename=file, + linenumber=task["action"][LINE_NUMBER_KEY], + message=f"Task uses 'set_fact' to define variables that violates variable naming standards: {key}", + tag=f"var-naming[{key}]", + ) + ) + + # If the task registers a variable + registered_var = task.get("register", None) + if registered_var and self.is_invalid_variable_name(registered_var): + results.append( + self.create_matcherror( + filename=file, + linenumber=task[LINE_NUMBER_KEY], + message=f"Task registers a variable that violates variable naming standards: {registered_var}", + tag=f"var-naming[{registered_var}]", + ) + ) + + return results + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Return matches for variables defined in vars files.""" + results: list[MatchError] = [] + raw_results: list[MatchError] = [] + meta_data: dict[AnsibleUnicode, Any] = {} + + if str(file.kind) == "vars" and file.data: + meta_data = parse_yaml_from_file(str(file.path)) + for key in meta_data.keys(): + if self.is_invalid_variable_name(key): + raw_results.append( + self.create_matcherror( + filename=file, + linenumber=key.ansible_pos[1], + message="File defines variable '" + + key + + "' that violates variable naming standards", + ) + ) + if raw_results: + lines = file.content.splitlines() + for match in raw_results: + # linenumber starts with 1, not zero + skip_list = get_rule_skips_from_line(lines[match.linenumber - 1]) + if match.rule.id not in skip_list and match.tag not in skip_list: + results.append(match) + else: + 
results.extend(super().matchyaml(file)) + return results + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + from ansiblelint.testing import ( # pylint: disable=ungrouped-imports + RunFromText, + run_ansible_lint, + ) + + @pytest.mark.parametrize( + ("file", "expected"), + ( + pytest.param("examples/playbooks/rule-var-naming-fail.yml", 7, id="0"), + pytest.param("examples/Taskfile.yml", 0, id="1"), + ), + ) + def test_invalid_var_name_playbook(file: str, expected: int) -> None: + """Test rule matches.""" + rules = RulesCollection(options=options) + rules.register(VariableNamingRule()) + results = Runner(Lintable(file), rules=rules).run() + # results = rule_runner.run() + assert len(results) == expected + for result in results: + assert result.rule.id == VariableNamingRule.id + # We are not checking line numbers because they can vary between + # different versions of ruamel.yaml (and depending on presence/absence + # of its c-extension) + + @pytest.mark.parametrize( + "rule_runner", (VariableNamingRule,), indirect=["rule_runner"] + ) + def test_invalid_var_name_varsfile(rule_runner: RunFromText) -> None: + """Test rule matches.""" + results = rule_runner.run_role_defaults_main(FAIL_VARS) + assert len(results) == 2 + for result in results: + assert result.rule.id == VariableNamingRule.id + + # list unexpected error lines or non-matching error lines + expected_error_lines = [2, 6] + lines = [i.linenumber for i in results] + error_lines_difference = list( + set(expected_error_lines).symmetric_difference(set(lines)) + ) + assert len(error_lines_difference) == 0 + + def test_var_naming_with_pattern() -> None: + """Test rule matches.""" + role_path = "examples/roles/var_naming_pattern/tasks/main.yml" + conf_path = "examples/roles/var_naming_pattern/.ansible-lint" + result = run_ansible_lint( + f"--config-file={conf_path}", + role_path, + ) + assert result.returncode == SUCCESS_RC + assert "var-naming" not in result.stdout diff --git a/src/ansiblelint/rules/yaml.md b/src/ansiblelint/rules/yaml.md new file mode 100644 index 0000000..03055d0 --- /dev/null +++ b/src/ansiblelint/rules/yaml.md @@ -0,0 +1,92 @@ +# yaml + +This rule checks YAML syntax and is an implementation of `yamllint`. + +You can disable YAML syntax violations by adding `yaml` to the `skip_list` in +your Ansible-lint configuration as follows: + +```yaml +skip_list: + - yaml +``` + +For more fine-grained control, disable violations for specific rules using tag +identifiers in the `yaml[yamllint_rule]` format as follows: + +```yaml +skip_list: + - yaml[trailing-spaces] + - yaml[indentation] +``` + +If you want Ansible-lint to report YAML syntax violations as warnings, and not +fatal errors, add tag identifiers to the `warn_list` in your configuration, for +example: + +```yaml +warn_list: + - yaml[document-start] +``` + +See the +[list of yamllint rules](https://yamllint.readthedocs.io/en/stable/rules.html) +for more information. 
+
+Some of the detailed error codes that you might see are:
+
+- `yaml[brackets]` - _too few spaces inside empty brackets_, or _too many spaces
+  inside brackets_
+- `yaml[colons]` - _too many spaces before colon_, or _too many spaces after
+  colon_
+- `yaml[commas]` - _too many spaces before comma_, or _too few spaces after
+  comma_
+- `yaml[comments-indentation]` - _Comment not indented like content_
+- `yaml[comments]` - _Too few spaces before comment_, or _Missing starting space
+  in comment_
+- `yaml[document-start]` - _missing document start "---"_ or _found forbidden
+  document start "---"_
+- `yaml[empty-lines]` - _too many blank lines (...> ...)_
+- `yaml[indentation]` - _Wrong indentation: expected ... but found ..._
+- `yaml[key-duplicates]` - _Duplication of key "..." in mapping_
+- `yaml[new-line-at-end-of-file]` - _No new line character at the end of file_
+- `yaml[octal-values]` - _forbidden implicit or explicit [octal](#octals) value_
+- `yaml[syntax]` - YAML syntax is broken
+- `yaml[trailing-spaces]` - Spaces are found at the end of lines
+- `yaml[truthy]` - _Truthy value should be one of ..._
+
+## Octals
+
+As the [YAML specification] regarding octal values has changed at least three
+times, in [1.1], [1.2.0] and [1.2.2], we now require users to always add quotes
+around octal values, so that all YAML loaders read them as strings, providing
+consistent behavior. This is also safer, as JSON does not support octal values
+either.
+
+By default, yamllint does not check for octals, but our custom default ruleset
+for it does. If, for some reason, you do not want to follow our defaults, you
+can create a `.yamllint` file in your project and it will take precedence over
+our defaults.
+
+## Problematic code
+
+```yaml
+# Missing YAML document start.
+foo: 0777 # <-- yaml[octal-values]
+foo2: 0o777 # <-- yaml[octal-values]
+foo2: ... # <-- yaml[key-duplicates]
+bar: ... # <-- yaml[comments-indentation]
+```
+
+## Correct code
+
+```yaml
+---
+foo: "0777" # <-- Explicitly quoting octal is less risky.
+foo2: "0o777" # <-- Explicitly quoting octal is less risky.
+bar: ... # Correct comment indentation.
+``` + +[1.1]: https://yaml.org/spec/1.1/ +[1.2.0]: https://yaml.org/spec/1.2.0/ +[1.2.2]: https://yaml.org/spec/1.2.2/ +[yaml specification]: https://yaml.org/ diff --git a/src/ansiblelint/rules/yaml_rule.py b/src/ansiblelint/rules/yaml_rule.py new file mode 100644 index 0000000..d731fc0 --- /dev/null +++ b/src/ansiblelint/rules/yaml_rule.py @@ -0,0 +1,179 @@ +"""Implementation of yaml linting rule (yamllint integration).""" +from __future__ import annotations + +import logging +import sys +from typing import TYPE_CHECKING, Iterable + +from yamllint.linter import run as run_yamllint + +from ansiblelint.constants import LINE_NUMBER_KEY, SKIPPED_RULES_KEY +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule +from ansiblelint.yaml_utils import load_yamllint_config + +if TYPE_CHECKING: + from typing import Any, Generator + + from ansiblelint.errors import MatchError + +_logger = logging.getLogger(__name__) + + +class YamllintRule(AnsibleLintRule): + """Violations reported by yamllint.""" + + id = "yaml" + severity = "VERY_LOW" + tags = ["formatting", "yaml"] + version_added = "v5.0.0" + config = load_yamllint_config() + has_dynamic_tags = True + link = "https://yamllint.readthedocs.io/en/stable/rules.html" + # ensure this rule runs before most of other common rules + _order = 1 + + def matchyaml(self, file: Lintable) -> list[MatchError]: + """Return matches found for a specific YAML text.""" + matches: list[MatchError] = [] + filtered_matches: list[MatchError] = [] + if str(file.base_kind) != "text/yaml": + return matches + + for problem in run_yamllint( + file.content, YamllintRule.config, filepath=file.path + ): + self.severity = "VERY_LOW" + if problem.level == "error": + self.severity = "MEDIUM" + if problem.desc.endswith("(syntax)"): + self.severity = "VERY_HIGH" + matches.append( + self.create_matcherror( + # yamllint does return lower-case sentences + message=problem.desc.capitalize(), + linenumber=problem.line, + details="", + filename=file, + tag=f"yaml[{problem.rule}]", + ) + ) + + # Now we save inside the file the skips, so they can be removed later, + # especially as these skips can be about other rules than yaml one. 
+ _fetch_skips(file.data, file.line_skips) + + for match in matches: + last_skips = set() + + for line, skips in file.line_skips.items(): + if line > match.linenumber: + break + last_skips = skips + if last_skips.intersection({"skip_ansible_lint", match.rule.id, match.tag}): + continue + filtered_matches.append(match) + + return filtered_matches + + +def _combine_skip_rules(data: Any) -> set[str]: + """Return a consolidated list of skipped rules.""" + result = set(data.get(SKIPPED_RULES_KEY, [])) + tags = data.get("tags", []) + if tags and ( + isinstance(tags, Iterable) + and "skip_ansible_lint" in tags + or tags == "skip_ansible_lint" + ): + result.add("skip_ansible_lint") + return result + + +def _fetch_skips(data: Any, collector: dict[int, set[str]]) -> dict[int, set[str]]: + """Retrieve a dictionary with line: skips by looking recursively in given JSON structure.""" + if hasattr(data, "get") and data.get(LINE_NUMBER_KEY): + rules = _combine_skip_rules(data) + if rules: + collector[data.get(LINE_NUMBER_KEY)].update(rules) + if isinstance(data, Iterable) and not isinstance(data, str): + if isinstance(data, dict): + for entry, value in data.items(): + _fetch_skips(value, collector) + else: # must be some kind of list + for entry in data: + if ( + entry + and hasattr(data, "get") + and LINE_NUMBER_KEY in entry + and SKIPPED_RULES_KEY in entry + and entry[SKIPPED_RULES_KEY] + ): + collector[entry[LINE_NUMBER_KEY]].update(entry[SKIPPED_RULES_KEY]) + _fetch_skips(entry, collector) + return collector + + +# testing code to be loaded only with pytest or when executed the rule file +if "pytest" in sys.modules: + import pytest + + # pylint: disable=ungrouped-imports + from ansiblelint.config import options + from ansiblelint.rules import RulesCollection + from ansiblelint.runner import Runner + + @pytest.mark.parametrize( + ("file", "expected_kind", "expected"), + ( + ( + "examples/yamllint/invalid.yml", + "yaml", + [ + 'Missing document start "---"', + 'Duplication of key "foo" in mapping', + "Trailing spaces", + ], + ), + ( + "examples/yamllint/valid.yml", + "yaml", + [], + ), + ( + "examples/yamllint/multi-document.yaml", + "yaml", + [], + ), + ), + ids=( + "invalid", + "valid", + "multi-document", + ), + ) + def test_yamllint(file: str, expected_kind: str, expected: list[str]) -> None: + """Validate parsing of ansible output.""" + lintable = Lintable(file) + assert lintable.kind == expected_kind + + rules = RulesCollection(options=options) + rules.register(YamllintRule()) + results = Runner(lintable, rules=rules).run() + + assert len(results) == len(expected), results + for idx, result in enumerate(results): + assert result.filename.endswith(file) + assert expected[idx] in result.message + assert isinstance(result.tag, str) + assert result.tag.startswith("yaml[") + + def test_yamllint_has_help(default_rules_collection: RulesCollection) -> None: + """Asserts that we loaded markdown documentation in help property.""" + for collection in default_rules_collection: + if collection.id == "yaml": + assert collection.help is not None + assert len(collection.help) > 100 + break + else: + pytest.fail("No yaml collection found") diff --git a/src/ansiblelint/runner.py b/src/ansiblelint/runner.py new file mode 100644 index 0000000..a98fe51 --- /dev/null +++ b/src/ansiblelint/runner.py @@ -0,0 +1,249 @@ +"""Runner implementation.""" +from __future__ import annotations + +import logging +import multiprocessing +import multiprocessing.pool +import os +from dataclasses import dataclass +from fnmatch import 
fnmatch +from typing import TYPE_CHECKING, Any, Generator + +import ansiblelint.skip_utils +import ansiblelint.utils +from ansiblelint._internal.rules import LoadingFailureRule +from ansiblelint.constants import States +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable, expand_dirs_in_lintables +from ansiblelint.rules.syntax_check import AnsibleSyntaxCheckRule + +if TYPE_CHECKING: + from argparse import Namespace + + from ansiblelint.rules import RulesCollection + +_logger = logging.getLogger(__name__) + + +@dataclass +class LintResult: + """Class that tracks result of linting.""" + + matches: list[MatchError] + files: set[Lintable] + + +class Runner: + """Runner class performs the linting process.""" + + # pylint: disable=too-many-arguments,too-many-instance-attributes + def __init__( + self, + *lintables: Lintable | str, + rules: RulesCollection, + tags: frozenset[Any] = frozenset(), + skip_list: list[str] | None = None, + exclude_paths: list[str] | None = None, + verbosity: int = 0, + checked_files: set[Lintable] | None = None, + project_dir: str | None = None, + ) -> None: + """Initialize a Runner instance.""" + self.rules = rules + self.lintables: set[Lintable] = set() + self.project_dir = os.path.abspath(project_dir) if project_dir else None + + if skip_list is None: + skip_list = [] + if exclude_paths is None: + exclude_paths = [] + + # Assure consistent type + for item in lintables: + if not isinstance(item, Lintable): + item = Lintable(item) + self.lintables.add(item) + + # Expand folders (roles) to their components + expand_dirs_in_lintables(self.lintables) + + self.tags = tags + self.skip_list = skip_list + self._update_exclude_paths(exclude_paths) + self.verbosity = verbosity + if checked_files is None: + checked_files = set() + self.checked_files = checked_files + + def _update_exclude_paths(self, exclude_paths: list[str]) -> None: + if exclude_paths: + # These will be (potentially) relative paths + paths = ansiblelint.file_utils.expand_paths_vars(exclude_paths) + # Since ansiblelint.utils.find_children returns absolute paths, + # and the list of files we create in `Runner.run` can contain both + # relative and absolute paths, we need to cover both bases. + self.exclude_paths = paths + [os.path.abspath(p) for p in paths] + else: + self.exclude_paths = [] + + def is_excluded(self, lintable: Lintable) -> bool: + """Verify if a file path should be excluded.""" + # Any will short-circuit as soon as something returns True, but will + # be poor performance for the case where the path under question is + # not excluded. + + # Exclusions should be evaluated only using absolute paths in order + # to work correctly. 
+ abs_path = str(lintable.abspath) + if self.project_dir and not abs_path.startswith(self.project_dir): + _logger.debug( + "Skipping %s as it is outside of the project directory.", abs_path + ) + return True + + return any( + abs_path.startswith(path) + or lintable.path.match(path) + or fnmatch(str(abs_path), path) + or fnmatch(str(lintable), path) + for path in self.exclude_paths + ) + + def run(self) -> list[MatchError]: # noqa: C901 + """Execute the linting process.""" + files: list[Lintable] = [] + matches: list[MatchError] = [] + + # remove exclusions + for lintable in self.lintables.copy(): + if self.is_excluded(lintable): + _logger.debug("Excluded %s", lintable) + self.lintables.remove(lintable) + continue + if isinstance(lintable.data, States) and lintable.exc: + matches.append( + MatchError( + filename=lintable, + message=str(lintable.exc), + details=str(lintable.exc.__cause__), + rule=LoadingFailureRule(), + ) + ) + lintable.stop_processing = True + + # -- phase 1 : syntax check in parallel -- + def worker(lintable: Lintable) -> list[MatchError]: + # pylint: disable=protected-access + return AnsibleSyntaxCheckRule._get_ansible_syntax_check_matches(lintable) + + # playbooks: List[Lintable] = [] + for lintable in self.lintables: + if lintable.kind != "playbook" or lintable.stop_processing: + continue + files.append(lintable) + + # avoid resource leak warning, https://github.com/python/cpython/issues/90549 + # pylint: disable=unused-variable + global_resource = multiprocessing.Semaphore() + + pool = multiprocessing.pool.ThreadPool(processes=multiprocessing.cpu_count()) + return_list = pool.map(worker, files, chunksize=1) + pool.close() + pool.join() + for data in return_list: + matches.extend(data) + + # -- phase 2 --- + if not matches: + # do our processing only when ansible syntax check passed in order + # to avoid causing runtime exceptions. Our processing is not as + # resilient to be able process garbage. 
+ matches.extend(self._emit_matches(files)) + + # remove duplicates from files list + files = [value for n, value in enumerate(files) if value not in files[:n]] + + for file in self.lintables: + if file in self.checked_files or not file.kind: + continue + _logger.debug( + "Examining %s of type %s", + ansiblelint.file_utils.normpath(file.path), + file.kind, + ) + + matches.extend( + self.rules.run(file, tags=set(self.tags), skip_list=self.skip_list) + ) + + # update list of checked files + self.checked_files.update(self.lintables) + + # remove any matches made inside excluded files + matches = list( + filter( + lambda match: not self.is_excluded(Lintable(match.filename)) + and hasattr(match, "lintable") + and match.tag not in match.lintable.line_skips[match.linenumber], + matches, + ) + ) + + return sorted(set(matches)) + + def _emit_matches(self, files: list[Lintable]) -> Generator[MatchError, None, None]: + visited: set[Lintable] = set() + while visited != self.lintables: + for lintable in self.lintables - visited: + try: + for child in ansiblelint.utils.find_children(lintable): + if self.is_excluded(child): + continue + self.lintables.add(child) + files.append(child) + except MatchError as exc: + if not exc.filename: # pragma: no branch + exc.filename = str(lintable.path) + exc.rule = LoadingFailureRule() + yield exc + except AttributeError: + yield MatchError(filename=lintable, rule=LoadingFailureRule()) + visited.add(lintable) + + +def _get_matches(rules: RulesCollection, options: Namespace) -> LintResult: + lintables = ansiblelint.utils.get_lintables(opts=options, args=options.lintables) + + for rule in rules: + if "unskippable" in rule.tags: + for entry in (*options.skip_list, *options.warn_list): + if rule.id == entry or entry.startswith(f"{rule.id}["): + raise RuntimeError( + f"Rule '{rule.id}' is unskippable, you cannot use it in 'skip_list' or 'warn_list'. Still, you could exclude the file." + ) + matches = [] + checked_files: set[Lintable] = set() + runner = Runner( + *lintables, + rules=rules, + tags=options.tags, + skip_list=options.skip_list, + exclude_paths=options.exclude_paths, + verbosity=options.verbosity, + checked_files=checked_files, + project_dir=options.project_dir, + ) + matches.extend(runner.run()) + + # Assure we do not print duplicates and the order is consistent + matches = sorted(set(matches)) + + # Convert reported filenames into human readable ones, so we hide the + # fact we used temporary files when processing input from stdin. + for match in matches: + for lintable in lintables: + if match.filename == lintable.filename: + match.filename = lintable.name + break + + return LintResult(matches=matches, files=checked_files) diff --git a/src/ansiblelint/schemas/README.md b/src/ansiblelint/schemas/README.md new file mode 100644 index 0000000..db1a554 --- /dev/null +++ b/src/ansiblelint/schemas/README.md @@ -0,0 +1,102 @@ +# Schemas for Ansible and its related tools + +[![ci](https://github.com/ansible-community/schemas/actions/workflows/task.yml/badge.svg)](https://github.com/ansible-community/schemas/actions/workflows/task.yml) +[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +[![Repository License: MIT](https://img.shields.io/badge/license-MIT-brightgreen.svg)](LICENSE) + +## About Schemas + +This project aims to generate JSON/YAML validation schemas for Ansible files +such as playbooks, tasks, requirements, meta or vars and also for Molecule +configuration. 
+
+Keep in mind that these schemas will limit your freedom of choice regarding the
+syntax you can use to write Ansible tasks, as they do not allow some historical
+forms which are still allowed by Ansible itself.
+
+Not every file accepted by Ansible will pass these schemas, but we do expect
+that any file that passes them will also be accepted by Ansible.
+
+- YAML 1.2 booleans are required as `true` or `false`, while Ansible itself
+  allows you to use more relaxed forms like `yes` or `no`.
+- Inline actions are not allowed, as the schema cannot validate them.
+- Non-built-in modules must be called using `action:` blocks.
+- Module arguments are not yet verified, but we plan to implement this.
+- Our schemas are strict about the use of Jinja2 templating: they require `{{`
+  on arguments declared as **explicit** and forbid `{{` on those marked as
+  **implicit**. See the section below for details.
+
+As these schemas are still experimental, pull requests that improve them are of
+much greater help than bug reports. You are still welcome to report bugs, but
+expect them to take a long time until someone finds time to fix them.
+
+If you want to help improve the schemas, have a look at the
+[development documentation](CONTRIBUTING.md).
+
+## Schema Bundle
+
+We are currently migrating towards a single [ansible.json](/f/ansible.json)
+schema bundle, one that contains subschema definitions for all the supported
+file types.
+
+To configure your validator or editor to use the bundle, use the new URLs
+below. The part after the `#` in each URL is essential because it tells the
+loader which subschema to use. You can also look at our
+[settings.json](.vscode/settings.json) to understand how to configure the
+[vscode-yaml](https://marketplace.visualstudio.com/items?itemName=redhat.vscode-yaml)
+extension.
+
+- [playbook subschema url](https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/ansible.json#/$defs/playbook)
+- [tasks subschema uri](https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/ansible.json#/$defs/tasks)
+
+## Jinja2 implicit vs explicit templating
+
+While Ansible might allow you to combine implicit and explicit templating, our
+schema will not. Our schemas will only allow you to use the recommended form,
+either by forbidding you to use the curly braces on implicit ones or forcing
+you to add them on explicit ones.
+
+Examples:
+
+```yaml
+- name: some task
+  command: echo 123
+  register: result
+  vars:
+    become_method_var: sudo
+  become_method: become_method_var # <-- schema will not allow this
+  # become_method: "{{ become_method_var }}" # <-- that is allowed
+```
+
+### How to find if a field is implicit or explicit?
+
+Assuming that your keyword is `failed_when`, you can run
+`ansible-doc -t keyword failed_when`, which will give you the following output:
+
+```yaml
+failed_when:
+  applies_to:
+  - Task
+  description:
+    Conditional expression that overrides the task's normal 'failed' status.
+  priority: 0
+  template: implicit
+  type: list
+```
+
+As you can see, the `template` field tells you whether the keyword is implicit
+or explicit.
+
+By being more restrictive, the schema protects you from common accidents, like
+writing a simple string in an explicit field, which would always evaluate as
+true instead of being evaluated as a Jinja2 template.
+
+## Activating the schemas
+
+At this moment, installing the
+[Ansible VS Code Extension by Red Hat](https://marketplace.visualstudio.com/items?itemName=redhat.ansible)
+will activate these schemas.
The file patterns used to trigger their use can be +seen +[here](https://github.com/ansible-community/vscode-ansible/blob/master/package.json#L44-L94) + +Because these schemas are generic, you can easily use them with any validators +that support them. diff --git a/src/ansiblelint/schemas/__init__.py b/src/ansiblelint/schemas/__init__.py new file mode 100644 index 0000000..bc93fe4 --- /dev/null +++ b/src/ansiblelint/schemas/__init__.py @@ -0,0 +1,4 @@ +"""Module containing cached JSON schemas.""" +from ansiblelint.schemas.main import JSON_SCHEMAS, refresh_schemas, validate_file_schema + +__all__ = ("JSON_SCHEMAS", "refresh_schemas", "validate_file_schema") diff --git a/src/ansiblelint/schemas/__main__.py b/src/ansiblelint/schemas/__main__.py new file mode 100644 index 0000000..301de7d --- /dev/null +++ b/src/ansiblelint/schemas/__main__.py @@ -0,0 +1,13 @@ +"""Module containing cached JSON schemas.""" +import sys + +from ansiblelint.schemas.main import refresh_schemas + +if __name__ == "__main__": + if refresh_schemas(): # pragma: no cover + # flake8: noqa: T201 + print("Schemas were updated.") + sys.exit(1) + else: # pragma: no cover + # flake8: noqa: T201 + print("Schemas not updated", 0) diff --git a/src/ansiblelint/schemas/__store__.json b/src/ansiblelint/schemas/__store__.json new file mode 100644 index 0000000..3150734 --- /dev/null +++ b/src/ansiblelint/schemas/__store__.json @@ -0,0 +1,58 @@ +{ + "ansible-lint-config": { + "etag": "45ec120948f291620e297af0d75625ceb79d9295e2ec8b31652948c31ceeb209", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/ansible-lint-config.json" + }, + "ansible-navigator-config": { + "etag": "c1038568788867f861cdba2aefde88a71b8ab1a6d874ecfa84eb14c110787677", + "url": "https://raw.githubusercontent.com/ansible/ansible-navigator/main/src/ansible_navigator/data/ansible-navigator.json" + }, + "arg_specs": { + "etag": "bd98c32fe4b9672bdadb85efd0dbfded7ef08b6cfda5c0ff91fb1cf45e274e0e", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/arg_specs.json" + }, + "changelog": { + "etag": "f433b08eb8b9c5e358e9e479a79912da4a0f601077b8be52378526c27bc1c13b", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/changelog.json" + }, + "execution-environment": { + "etag": "17ebd7426f2f31e362f7e0eae6683fbb17b83983bc0fdfc2cdf5c83e1ed38808", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/execution-environment.json" + }, + "galaxy": { + "etag": "90d0beb8a8ec0fc9ebdc146f3d19f1566c648ed5574b381ed63e06cb15de4d37", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/galaxy.json" + }, + "inventory": { + "etag": "47d61f6b6f9f84b32414245ae1d0800cffe4ba8adc0a1307dfbdab8d195af3e6", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/inventory.json" + }, + "meta": { + "etag": "090cc0a998e0251c48ecf91b62607fb93cc75cf8553545a4fa26fa808873d236", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/meta.json" + }, + "meta-runtime": { + "etag": "c1633dcafd016a44e2aba7574136983fef3da2bbb74eeb61785424081e8a82a8", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/meta-runtime.json" + }, + "molecule": { + "etag": "a61d2a529f04686f0c7e171e50ab6182798252fba5d955a991134ab5b5c742f8", + "url": 
"https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/molecule.json" + }, + "playbook": { + "etag": "f49882815c54f8595c4e2b9cbcf8f48b1bf5dee892b4cf8938d13343c8448d7b", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/playbook.json" + }, + "requirements": { + "etag": "a11edf24f416043f2da8dd329f1d61338fc9708e017bd3cbe43d8c06e4b30090", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/requirements.json" + }, + "tasks": { + "etag": "7725d87e98752a96967cce8a62bc8593c15a85582f1cdefef6afce61a6a9bbe5", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/tasks.json" + }, + "vars": { + "etag": "09aba62c089e2c56d414919979d3cc055c8983d721fb18e2f612c1bee028e53b", + "url": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/vars.json" + } +} diff --git a/src/ansiblelint/schemas/ansible-lint-config.json b/src/ansiblelint/schemas/ansible-lint-config.json new file mode 100644 index 0000000..ad5373c --- /dev/null +++ b/src/ansiblelint/schemas/ansible-lint-config.json @@ -0,0 +1,256 @@ +{ + "$defs": { + "rule": { + "additionalProperties": false, + "properties": { + "exclude_paths": { + "items": { + "type": "string" + }, + "title": "Glob-like paths to be excluded.", + "type": "array" + } + }, + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/ansible-lint-config.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": [".ansible-lint", ".config/ansible-lint.yml"], + "properties": { + "display_relative_path": { + "default": true, + "title": "Configure how to display file paths", + "type": "boolean" + }, + "enable_list": { + "items": { + "type": "string" + }, + "title": "Enable List", + "type": "array" + }, + "exclude_paths": { + "items": { + "type": "string" + }, + "title": "Exclude Paths", + "type": "array" + }, + "extra_vars": { + "title": "Extra Vars", + "type": "object" + }, + "kinds": { + "items": { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + "title": "Kinds", + "type": "array" + }, + "loop_var_prefix": { + "title": "Loop Var Prefix", + "type": "string" + }, + "mock_modules": { + "items": { + "type": "string" + }, + "title": "Mock Modules", + "type": "array" + }, + "mock_roles": { + "items": { + "type": "string" + }, + "title": "Mock Roles", + "type": "array" + }, + "offline": { + "default": false, + "title": "Offline", + "type": "boolean" + }, + "only_builtins_allow_collections": { + "items": { + "type": "string" + }, + "title": "Only Builtins Allow Collections", + "type": "array" + }, + "only_builtins_allow_modules": { + "items": { + "type": "string" + }, + "title": "Only Builtins Allow Modules", + "type": "array" + }, + "parseable": { + "default": true, + "title": "Parseable", + "type": "boolean" + }, + "profile": { + "enum": [ + "min", + "basic", + "moderate", + "safety", + "shared", + "production", + null + ], + "title": "Profile", + "type": ["null", "string"] + }, + "progressive": { + "default": false, + "title": "Progressive", + "type": "boolean" + }, + "quiet": { + "default": true, + "title": "Quiet", + "type": "boolean" + }, + "rules": { + "additionalProperties": { + "$ref": "#/$defs/rule" + }, + "propertyNames": { + "enum": [ + "command-instead-of-module", + "command-instead-of-shell", + "deprecated-bare-vars", + "deprecated-command-syntax", + 
"deprecated-local-action", + "deprecated-module", + "empty-string-compare", + "fqcn", + "fqcn[action-core]", + "fqcn[action-redirect]", + "fqcn[action]", + "galaxy", + "galaxy[no-changelog]", + "galaxy[tags]", + "galaxy[version-incorrect]", + "galaxy[version-missing]", + "ignore-errors", + "inline-env-var", + "internal-error", + "jinja", + "key-order", + "latest", + "literal-compare", + "load-failure", + "meta-incorrect", + "meta-no-info", + "meta-no-tags", + "meta-runtime", + "meta-video-links", + "name", + "no-changed-when", + "no-handler", + "no-jinja-when", + "no-log-password", + "loop-var-prefix", + "no-prompting", + "no-relative-paths", + "no-same-owner", + "no-tabs", + "only-builtins", + "package-latest", + "parser-error", + "partial-become", + "playbook-extension", + "risky-file-permissions", + "risky-octal", + "risky-shell-pipe", + "role-name", + "schema", + "syntax-check", + "var-naming", + "yaml" + ] + }, + "title": "Rules specific configuration.", + "type": "object" + }, + "rulesdir": { + "items": { + "type": "string" + }, + "title": "Rulesdir", + "type": "array" + }, + "sarif_file": { + "default": null, + "title": "SARIF Output filename", + "type": ["null", "string"] + }, + "skip_action_validation": { + "default": false, + "title": "Skip Action Validation", + "type": "boolean" + }, + "skip_list": { + "items": { + "type": "string" + }, + "title": "Skip List", + "type": "array" + }, + "strict": { + "default": false, + "title": "Strict", + "type": "boolean" + }, + "tags": { + "items": { + "type": "string" + }, + "title": "Tags", + "type": "array" + }, + "task_name_prefix": { + "default": "{stem} | ", + "title": "Allow custom prefix for task[prefix]", + "type": "string" + }, + "use_default_rules": { + "default": true, + "title": "Use Default Rules", + "type": "boolean" + }, + "var_naming_pattern": { + "default": "^[a-z_][a-z0-9_]*$", + "title": "Regex used to verify variable names", + "type": "string" + }, + "verbosity": { + "default": 0, + "title": "Verbosity", + "type": "integer" + }, + "warn_list": { + "items": { + "type": "string" + }, + "title": "Warn List", + "type": "array" + }, + "write_list": { + "items": { + "type": "string" + }, + "title": "Write List", + "type": "array" + } + }, + "title": "Ansible-lint Configuration Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/ansible-navigator-config.json b/src/ansiblelint/schemas/ansible-navigator-config.json new file mode 100644 index 0000000..5acc39c --- /dev/null +++ b/src/ansiblelint/schemas/ansible-navigator-config.json @@ -0,0 +1,517 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "properties": { + "ansible-navigator": { + "additionalProperties": false, + "properties": { + "ansible": { + "additionalProperties": false, + "properties": { + "cmdline": { + "description": "Extra parameters passed to the corresponding command", + "type": "string" + }, + "config": { + "additionalProperties": false, + "properties": { + "help": { + "default": false, + "description": "Help options for ansible-config command in stdout mode", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "path": { + "description": "Specify the path to the ansible configuration file", + "type": "string" + } + } + }, + "doc": { + "additionalProperties": false, + "properties": { + "help": { + "default": false, + "description": "Help options for ansible-doc command in stdout mode", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "plugin": { + "additionalProperties": false, + 
"properties": { + "name": { + "description": "Specify the plugin name", + "type": "string" + }, + "type": { + "default": "module", + "description": "Specify the plugin type, 'become', 'cache', 'callback', 'cliconf', 'connection', 'httpapi', 'inventory', 'lookup', 'module', 'netconf', 'shell', 'strategy' or 'vars'", + "enum": [ + "become", + "cache", + "callback", + "cliconf", + "connection", + "httpapi", + "inventory", + "lookup", + "module", + "netconf", + "shell", + "strategy", + "vars" + ], + "type": "string" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "inventory": { + "additionalProperties": false, + "properties": { + "entries": { + "description": "Specify an inventory file path or comma separated host list", + "items": { + "type": "string" + }, + "type": "array" + }, + "help": { + "default": false, + "description": "Help options for ansible-inventory command in stdout mode", + "enum": [ + true, + false + ], + "type": "boolean" + } + } + }, + "playbook": { + "additionalProperties": false, + "properties": { + "help": { + "default": false, + "description": "Help options for ansible-playbook command in stdout mode", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "path": { + "description": "Specify the playbook name", + "type": "string" + } + } + } + }, + "type": "object" + }, + "ansible-builder": { + "additionalProperties": false, + "properties": { + "help": { + "default": false, + "description": "Help options for ansible-builder command in stdout mode", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "workdir": { + "default": ".", + "description": "Specify the path that contains ansible-builder manifest files", + "type": "string" + } + }, + "type": "object" + }, + "ansible-lint": { + "additionalProperties": false, + "properties": { + "config": { + "description": "Specify the path to the ansible-lint configuration file", + "type": "string" + }, + "lintables": { + "description": "Path to files on which to run ansible-lint", + "type": "string" + } + }, + "type": "object" + }, + "ansible-runner": { + "additionalProperties": false, + "properties": { + "artifact-dir": { + "description": "The directory path to store artifacts generated by ansible-runner", + "type": "string" + }, + "rotate-artifacts-count": { + "description": "Keep ansible-runner artifact directories, for last n runs, if set to 0 artifact directories won't be deleted", + "type": "integer" + }, + "timeout": { + "description": "The timeout value after which ansible-runner will forcefully stop the execution", + "type": "integer" + } + }, + "type": "object" + }, + "app": { + "default": "welcome", + "description": "Subcommands", + "enum": [ + "builder", + "collections", + "config", + "doc", + "exec", + "images", + "inventory", + "lint", + "replay", + "run", + "settings", + "welcome" + ], + "type": "string" + }, + "collection-doc-cache-path": { + "default": "~/.cache/ansible-navigator/collection_doc_cache.db", + "description": "The path to collection doc cache", + "type": "string" + }, + "color": { + "additionalProperties": false, + "properties": { + "enable": { + "default": true, + "description": "Enable the use of color for mode interactive and stdout", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "osc4": { + "default": true, + "description": "Enable or disable terminal color changing support with OSC 4", + "enum": [ + true, + false + ], + "type": "boolean" + } + }, + "type": "object" + }, + "editor": { + "additionalProperties": false, + "properties": { + "command": { + 
"default": "vi +{line_number} {filename}", + "description": "Specify the editor command", + "type": "string" + }, + "console": { + "default": true, + "description": "Specify if the editor is console based", + "enum": [ + true, + false + ], + "type": "boolean" + } + }, + "type": "object" + }, + "enable-prompts": { + "default": false, + "description": "Enable prompts for password and in playbooks. This will set mode to stdout and disable playbook artifact creation", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "exec": { + "additionalProperties": false, + "properties": { + "command": { + "default": "/bin/bash", + "description": "Specify the command to run within the execution environment", + "type": "string" + }, + "shell": { + "default": true, + "description": "Specify the exec command should be run in a shell", + "enum": [ + true, + false + ], + "type": "boolean" + } + }, + "type": "object" + }, + "execution-environment": { + "additionalProperties": false, + "properties": { + "container-engine": { + "default": "auto", + "description": "Specify the container engine (auto=podman then docker)", + "enum": [ + "auto", + "podman", + "docker" + ], + "type": "string" + }, + "container-options": { + "description": "Extra parameters passed to the container engine command", + "items": { + "type": "string" + }, + "type": "array" + }, + "enabled": { + "default": true, + "description": "Enable or disable the use of an execution environment", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "environment-variables": { + "additionalProperties": false, + "properties": { + "pass": { + "description": "Specify an existing environment variable to be passed through to and set within the execution environment (--penv MY_VAR)", + "items": { + "type": "string" + }, + "type": "array" + }, + "set": { + "description": "Specify an environment variable and a value to be set within the execution environment (--senv MY_VAR=42)", + "type": "object" + } + }, + "type": "object" + }, + "image": { + "description": "Specify the name of the execution environment image", + "type": "string" + }, + "pull": { + "additionalProperties": false, + "properties": { + "arguments": { + "description": "Specify any additional parameters that should be added to the pull command when pulling an execution environment from a container registry. e.g. 
--pa='--tls-verify=false'", + "items": { + "type": "string" + }, + "type": "array" + }, + "policy": { + "default": "tag", + "description": "Specify the image pull policy always:Always pull the image, missing:Pull if not locally available, never:Never pull the image, tag:if the image tag is 'latest', always pull the image, otherwise pull if not locally available", + "enum": [ + "always", + "missing", + "never", + "tag" + ], + "type": "string" + } + } + }, + "volume-mounts": { + "additionalProperties": false, + "description": "Specify volume to be bind mounted within an execution environment (--eev /home/user/test:/home/user/test:Z)", + "items": { + "additionalProperties": false, + "properties": { + "dest": { + "type": "string" + }, + "options": { + "type": "string" + }, + "src": { + "type": "string" + } + }, + "required": [ + "src", + "dest" + ], + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + }, + "format": { + "default": "yaml", + "description": "Specify the format for stdout output.", + "enum": [ + "json", + "yaml" + ], + "type": "string" + }, + "images": { + "additionalProperties": false, + "properties": { + "details": { + "default": [ + "everything" + ], + "description": "Provide detailed information about the selected execution environment image", + "items": { + "enum": [ + "ansible_collections", + "ansible_version", + "everything", + "os_release", + "python_packages", + "python_version", + "redhat_release", + "system_packages" + ], + "type": "string" + }, + "type": "array" + } + } + }, + "inventory-columns": { + "description": "Specify a host attribute to show in the inventory view", + "items": { + "type": "string" + }, + "type": "array" + }, + "logging": { + "additionalProperties": false, + "properties": { + "append": { + "default": true, + "description": "Specify if log messages should be appended to an existing log file, otherwise a new log file will be created per session", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "file": { + "default": "./ansible-navigator.log", + "description": "Specify the full path for the ansible-navigator log file", + "type": "string" + }, + "level": { + "default": "warning", + "description": "Specify the ansible-navigator log level", + "enum": [ + "debug", + "info", + "warning", + "error", + "critical" + ], + "type": "string" + } + }, + "type": "object" + }, + "mode": { + "default": "interactive", + "description": "Specify the user-interface mode", + "enum": [ + "stdout", + "interactive" + ], + "type": "string" + }, + "playbook-artifact": { + "additionalProperties": false, + "properties": { + "enable": { + "default": true, + "description": "Enable or disable the creation of artifacts for completed playbooks. Note: not compatible with '--mode stdout' when playbooks require user input", + "enum": [ + true, + false + ], + "type": "boolean" + }, + "replay": { + "description": "Specify the path for the playbook artifact to replay", + "type": "string" + }, + "save-as": { + "default": "{playbook_dir}/{playbook_name}-artifact-{time_stamp}.json", + "description": "Specify the name for artifacts created from completed playbooks. The following placeholders are available: {playbook_dir}, {playbook_name}, {playbook_status}, and {time_stamp}", + "type": "string" + } + }, + "type": "object" + }, + "settings": { + "additionalProperties": false, + "properties": { + "effective": { + "default": false, + "description": "Show the effective settings. 
Defaults, CLI parameters, environment variables, and the settings file will be combined", + "type": "boolean" + }, + "sample": { + "default": false, + "description": "Generate a sample settings file", + "type": "boolean" + }, + "schema": { + "default": "json", + "description": "Generate a schema for the settings file ('json'= draft-07 JSON Schema)", + "enum": [ + "json" + ], + "type": "string" + }, + "sources": { + "default": false, + "description": "Show the source of each current settings entry", + "type": "boolean" + } + } + }, + "time-zone": { + "default": "UTC", + "description": "Specify the IANA time zone to use or 'local' to use the system time zone", + "type": "string" + } + } + } + }, + "required": [ + "ansible-navigator" + ], + "title": "ansible-navigator settings v2.2", + "type": "object", + "version": "2.2" +} diff --git a/src/ansiblelint/schemas/ansible-navigator.json b/src/ansiblelint/schemas/ansible-navigator.json new file mode 100644 index 0000000..be83649 --- /dev/null +++ b/src/ansiblelint/schemas/ansible-navigator.json @@ -0,0 +1,430 @@ +{ + "$defs": { + "AnsibleBuilderModel": { + "additionalProperties": false, + "properties": { + "workdir": { + "default": "/tmp/", + "description": "Specify the path that contains ansible-builder manifest files", + "title": "Workdir", + "type": "string" + } + }, + "type": "object" + }, + "AnsibleModel": { + "additionalProperties": false, + "properties": { + "cmdline": { + "description": "Extra parameters passed to the corresponding command", + "title": "Cmdline", + "type": "string" + }, + "config": { + "description": "Specify the path to the ansible configuration file", + "title": "Config", + "type": "string" + }, + "inventories": { + "description": "Specify an inventory file path or host list", + "items": { + "type": "string" + }, + "title": "Inventories", + "type": "array" + }, + "playbook": { + "description": "Specify the playbook name", + "title": "Playbook", + "type": "string" + } + }, + "title": "AnsibleModel", + "type": "object" + }, + "AnsibleNavigatorModel": { + "additionalProperties": false, + "properties": { + "ansible": { + "$ref": "#/$defs/AnsibleModel" + }, + "ansible-builder": { + "$ref": "#/$defs/AnsibleBuilderModel" + }, + "ansible-runner": { + "$ref": "#/$defs/AnsibleRunnerModel" + }, + "app": { + "default": "welcome", + "description": "Subcommands", + "enum": [ + "collections", + "config", + "doc", + "exec", + "images", + "inventory", + "replay", + "run", + "welcome" + ], + "title": "App", + "type": "string" + }, + "collection-doc-cache-path": { + "default": "$HOME/.cache/ansible-navigator/collection_doc_cache.db", + "description": "The path to collection doc cache", + "title": "Collection-Doc-Cache-Path", + "type": "string" + }, + "color": { + "$ref": "#/$defs/ColorModel" + }, + "documentation": { + "$ref": "#/$defs/DocumentationModel" + }, + "editor": { + "$ref": "#/$defs/EditorModel" + }, + "exec": { + "$ref": "#/$defs/ExecModel" + }, + "execution-environment": { + "$ref": "#/$defs/ExecutionEnvironmentModel" + }, + "help-builder": { + "default": false, + "description": "Help options for ansible-builder command in stdout mode", + "title": "Help-Builder", + "type": "boolean" + }, + "help-config": { + "default": false, + "description": "Help options for ansible-config command in stdout mode", + "title": "Help-Config", + "type": "boolean" + }, + "help-doc": { + "default": false, + "description": "Help options for ansible-doc command in stdout mode", + "title": "Help-Doc", + "type": "boolean" + }, + "help-inventory": { + 
"default": false, + "description": "Help options for ansible-inventory command in stdout mode", + "title": "Help-Inventory", + "type": "boolean" + }, + "help-playbook": { + "default": false, + "description": "Help options for ansible-playbook command in stdout mode", + "title": "Help-Playbook", + "type": "boolean" + }, + "inventory-columns": { + "description": "Specify a host attribute to show in the inventory view", + "items": { + "type": "string" + }, + "title": "Inventory-Columns", + "type": "array" + }, + "logging": { + "$ref": "#/$defs/LoggingModel" + }, + "mode": { + "default": "interactive", + "description": "Specify the user-interface mode", + "enum": ["stdout", "interactive"], + "title": "Mode", + "type": "string" + }, + "playbook-artifact": { + "$ref": "#/$defs/PlaybookArtifactModel" + } + }, + "title": "AnsibleNavigatorModel", + "type": "object" + }, + "AnsibleRunnerModel": { + "additionalProperties": false, + "properties": { + "artifact-dir": { + "description": "The directory path to store artifacts generated by ansible-runner", + "title": "Artifact-Dir", + "type": "string" + }, + "rotate-artifacts-count": { + "description": "Keep ansible-runner artifact directories, for last n runs, if set to 0 artifact directories won't be deleted", + "title": "Rotate-Artifacts-Count", + "type": "integer" + }, + "timeout": { + "description": "The timeout value after which ansible-runner will force stop the execution", + "title": "Timeout", + "type": "integer" + } + }, + "title": "AnsibleRunnerModel", + "type": "object" + }, + "ColorModel": { + "additionalProperties": false, + "properties": { + "enable": { + "default": false, + "description": "Enable the use of color in the display", + "title": "Enable", + "type": "boolean" + }, + "osc4": { + "default": true, + "description": "Enable or disable terminal color changing support with OSC 4", + "title": "Osc4", + "type": "boolean" + } + }, + "title": "ColorModel", + "type": "object" + }, + "DocumentationModel": { + "additionalProperties": false, + "properties": { + "plugin": { + "$ref": "#/$defs/PluginModel" + } + }, + "title": "DocumentationModel", + "type": "object" + }, + "EditorModel": { + "additionalProperties": false, + "properties": { + "command": { + "default": "vi +{line_number} {filename}", + "description": "Specify the editor command", + "title": "Command", + "type": "string" + }, + "console": { + "default": true, + "description": "Specify if the editor is console based", + "title": "Console", + "type": "boolean" + } + }, + "title": "EditorModel", + "type": "object" + }, + "EnvironmentVariablesModel": { + "additionalProperties": false, + "properties": { + "pass": { + "description": "Specify an exiting environment variable to be passed through to and set within the execution environment", + "items": { + "type": "string" + }, + "title": "Pass", + "type": "array" + }, + "set": { + "additionalProperties": { + "type": "string" + }, + "description": "Specify an environment variable and a value to be set within the execution environment", + "title": "Set", + "type": "object" + } + }, + "title": "EnvironmentVariablesModel", + "type": "object" + }, + "ExecModel": { + "additionalProperties": false, + "properties": { + "command": { + "default": "/bin/bash", + "description": "Specify the command to run within the execution environment", + "title": "Command", + "type": "string" + }, + "shell": { + "default": true, + "description": "Specify the exec command should be run in a shell", + "title": "Shell", + "type": "boolean" + } + }, + "title": 
"ExecModel", + "type": "object" + }, + "ExecutionEnvironmentModel": { + "additionalProperties": false, + "properties": { + "container-engine": { + "default": "auto", + "description": "Specify the container engine (auto=podman then docker)", + "enum": ["auto", "podman", "docker"], + "title": "Container-Engine", + "type": "string" + }, + "container-options": { + "description": "Extra parameters passed to the container engine command", + "items": { + "type": "string" + }, + "title": "Container-Options", + "type": "array" + }, + "enabled": { + "default": true, + "description": "Enable or disable the use of an execution environment", + "title": "Enabled", + "type": "boolean" + }, + "environment-variables": { + "$ref": "#/$defs/EnvironmentVariablesModel" + }, + "image": { + "default": "quay.io/ansible/creator-ee:v0.2.0", + "description": "Specify the name of the execution environment image", + "title": "Image", + "type": "string" + }, + "pull-policy": { + "default": "tag", + "description": "Specify the image pull policy.\nalways: Always pull the image\nmissing: Pull if not locally available\nnever: Never pull the image\ntag: if the image tag is 'latest', always pull the image, otherwise pull if not locally available", + "enum": ["always", "missing", "never", "tag"], + "title": "Pull-Policy", + "type": "string" + }, + "volume-mounts": { + "description": "Specify volume to be bind mounted within an execution environment", + "items": { + "$ref": "#/$defs/VolumeMountsModel" + }, + "title": "Volume-Mounts", + "type": "array" + } + }, + "title": "ExecutionEnvironmentModel", + "type": "object" + }, + "LoggingModel": { + "additionalProperties": false, + "properties": { + "append": { + "default": true, + "description": "Specify if log messages should be appended to an existing log file, otherwise a new log file will be created per session", + "title": "Append", + "type": "boolean" + }, + "file": { + "default": "$PWD/ansible-navigator.", + "description": "Specify the full path for the ansible-navigator log file", + "title": "File", + "type": "string" + }, + "level": { + "default": "warning", + "description": "Specify the ansible-navigator log level", + "enum": ["debug", "info", "warning", "error", "critical"], + "title": "Level", + "type": "string" + } + }, + "title": "LoggingModel", + "type": "object" + }, + "PlaybookArtifactModel": { + "additionalProperties": false, + "properties": { + "enable": { + "default": true, + "description": "Enable or disable the creation of artifacts for completed playbooks.\nNote: not compatible with 'mode: stdout' when playbooks require user input", + "title": "Enable", + "type": "boolean" + }, + "replay": { + "description": "Specify the path for the playbook artifact to replay", + "title": "Replay", + "type": "string" + }, + "save-as": { + "default": "{playbook_dir}/{playbook_name}-artifact-{ts_utc}.json", + "description": "Specify the name for artifacts created from completed playbooks", + "title": "Save-As", + "type": "string" + } + }, + "title": "PlaybookArtifactModel", + "type": "object" + }, + "PluginModel": { + "additionalProperties": false, + "properties": { + "name": { + "description": "Specify the plugin name", + "title": "Name", + "type": "string" + }, + "type": { + "default": "module", + "description": "Specify the plugin type", + "enum": [ + "become", + "cache", + "callback", + "cliconf", + "connection", + "httpapi", + "inventory", + "lookup", + "module", + "netconf", + "shell", + "strategy", + "vars" + ], + "title": "Type", + "type": "string" + } + }, + 
"title": "PluginModel", + "type": "object" + }, + "VolumeMountsModel": { + "additionalProperties": false, + "properties": { + "dest": { + "title": "Dest", + "type": "string" + }, + "label": { + "title": "Label", + "type": "string" + }, + "src": { + "title": "Src", + "type": "string" + } + }, + "required": ["src", "dest"], + "title": "VolumeMountsModel", + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/ansible-navigator.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": ["ansible-navigator.yml"], + "properties": { + "ansible-navigator": { + "$ref": "#/$defs/AnsibleNavigatorModel" + } + }, + "required": ["ansible-navigator"], + "title": "Ansible-Navigator Configuration Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/ansible.json b/src/ansiblelint/schemas/ansible.json new file mode 100644 index 0000000..ef61363 --- /dev/null +++ b/src/ansiblelint/schemas/ansible.json @@ -0,0 +1,1178 @@ +{ + "$defs": { + "ansible.builtin.import_playbook": { + "additionalProperties": false, + "oneOf": [ + { + "not": { + "required": ["import_playbook"] + }, + "required": ["ansible.builtin.import_playbook"] + }, + { + "not": { + "required": ["ansible.builtin.import_playbook"] + }, + "required": ["import_playbook"] + } + ], + "patternProperties": { + "^(ansible\\.builtin\\.)?import_playbook$": { + "markdownDescription": "* Includes a file with a list of plays to be executed.\n * Files with a list of plays can only be included at the top level.\n * You cannot use this action inside a play.\n\nSee [import_playbook](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/import_playbook_module.html)", + "title": "Import Playbook", + "type": "string" + }, + "name": { + "title": "Name", + "type": "string" + }, + "tags": { + "$ref": "#/$defs/tags" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "type": "object" + }, + "become_method": { + "markdownDescription": "See [become](https://docs.ansible.com/ansible/latest/user_guide/become.html)", + "oneOf": [ + { + "enum": [ + "sudo", + "su", + "pbrun", + "pfexec", + "runas", + "dzdo", + "ksu", + "doas", + "machinectl" + ], + "type": "string" + }, + { + "$ref": "#/$defs/full-jinja" + } + ], + "title": "Become Method" + }, + "block": { + "properties": { + "always": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "title": "Always", + "type": "array" + }, + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "block": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "markdownDescription": "Blocks create logical groups of tasks. Blocks also offer ways to handle task errors, similar to exception handling in many programming languages. 
See [blocks](https://docs.ansible.com/ansible/latest/user_guide/playbooks_blocks.html)", + "title": "Block", + "type": "array" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delegate_facts": { + "title": "Delegate Facts", + "type": "boolean" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "port": { + "$ref": "#/$defs/templated-integer" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "rescue": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "title": "Rescue", + "type": "array" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": ["block"], + "type": "object" + }, + "complex_conditional": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "environment": { + "anyOf": [ + { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + { + "$ref": "#/$defs/full-jinja" + } + ], + "title": "Environment" + }, + "full-jinja": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$", + "type": "string" + }, + "ignore_errors": { + "$ref": "#/$defs/templated-boolean", + "markdownDescription": "See [ignore_errors](https://docs.ansible.com/ansible/latest/user_guide/playbooks_error_handling.html#ignoring-failed-commands)", + "title": "Ignore Errors" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean", + "markdownDescription": "Use for protecting sensitive data. 
See [no_log](https://docs.ansible.com/ansible/latest/reference_appendices/logging.html)", + "title": "no_log" + }, + "play": { + "additionalProperties": false, + "allOf": [ + { + "not": { + "required": ["ansible.builtin.import_playbook"] + } + }, + { + "not": { + "required": ["import_playbook"] + } + } + ], + "properties": { + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "fact_path": { + "title": "Fact Path", + "type": "string" + }, + "force_handlers": { + "title": "Force Handlers", + "type": "boolean" + }, + "gather_facts": { + "title": "Gather Facts", + "type": "boolean" + }, + "gather_subset": { + "items": { + "anyOf": [ + { + "enum": [ + "all", + "min", + "all_ipv4_addresses", + "all_ipv6_addresses", + "apparmor", + "architecture", + "caps", + "chroot,cmdline", + "date_time", + "default_ipv4", + "default_ipv6", + "devices", + "distribution", + "distribution_major_version", + "distribution_release", + "distribution_version", + "dns", + "effective_group_ids", + "effective_user_id", + "env", + "facter", + "fips", + "hardware", + "interfaces", + "is_chroot", + "iscsi", + "kernel", + "local", + "lsb", + "machine", + "machine_id", + "mounts", + "network", + "ohai", + "os_family", + "pkg_mgr", + "platform", + "processor", + "processor_cores", + "processor_count", + "python", + "python_version", + "real_user_id", + "selinux", + "service_mgr", + "ssh_host_key_dsa_public", + "ssh_host_key_ecdsa_public", + "ssh_host_key_ed25519_public", + "ssh_host_key_rsa_public", + "ssh_host_pub_keys", + "ssh_pub_keys", + "system", + "system_capabilities", + "system_capabilities_enforced", + "user", + "user_dir", + "user_gecos", + "user_gid", + "user_id", + "user_shell", + "user_uid", + "virtual", + "virtualization_role", + "virtualization_type" + ], + "type": "string" + }, + { + "enum": [ + "!all", + "!min", + "!all_ipv4_addresses", + "!all_ipv6_addresses", + "!apparmor", + "!architecture", + "!caps", + "!chroot,cmdline", + "!date_time", + "!default_ipv4", + "!default_ipv6", + "!devices", + "!distribution", + "!distribution_major_version", + "!distribution_release", + "!distribution_version", + "!dns", + "!effective_group_ids", + "!effective_user_id", + "!env", + "!facter", + "!fips", + "!hardware", + "!interfaces", + "!is_chroot", + "!iscsi", + "!kernel", + "!local", + "!lsb", + "!machine", + "!machine_id", + "!mounts", + "!network", + "!ohai", + "!os_family", + "!pkg_mgr", + "!platform", + "!processor", + "!processor_cores", + "!processor_count", + "!python", + "!python_version", + "!real_user_id", + "!selinux", + "!service_mgr", + "!ssh_host_key_dsa_public", + "!ssh_host_key_ecdsa_public", + "!ssh_host_key_ed25519_public", + "!ssh_host_key_rsa_public", + "!ssh_host_pub_keys", + 
"!ssh_pub_keys", + "!system", + "!system_capabilities", + "!system_capabilities_enforced", + "!user", + "!user_dir", + "!user_gecos", + "!user_gid", + "!user_id", + "!user_shell", + "!user_uid", + "!virtual", + "!virtualization_role", + "!virtualization_type" + ], + "type": "string" + } + ] + }, + "title": "Gather Subset", + "type": "array" + }, + "gather_timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Gather Timeout" + }, + "handlers": { + "$ref": "#/$defs/tasks" + }, + "hosts": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Hosts" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "max_fail_percentage": { + "title": "Max Fail Percentage", + "type": "number" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "order": { + "enum": [ + "default", + "sorted", + "reverse_sorted", + "reverse_inventory", + "shuffle" + ], + "title": "Order", + "type": "string" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "post_tasks": { + "$ref": "#/$defs/tasks" + }, + "pre_tasks": { + "$ref": "#/$defs/tasks" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "roles": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/play-role" + }, + { + "type": "string" + } + ] + }, + "markdownDescription": "Roles let you automatically load related vars, files, tasks, handlers, and other Ansible artifacts based on a known file structure. After you group your content in roles, you can easily reuse them and share them with other users.\n See [roles](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#roles)", + "title": "Roles", + "type": "array" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "serial": { + "anyOf": [ + { + "$ref": "#/$defs/templated-integer-or-percent" + }, + { + "items": { + "$ref": "#/$defs/templated-integer-or-percent" + }, + "type": "array" + } + ], + "markdownDescription": "Integer, percentage or list of those. 
See [Setting the batch size with serial](https://docs.ansible.com/ansible/latest/user_guide/playbooks_strategies.html#setting-the-batch-size-with-serial)", + "title": "Batch size" + }, + "strategy": { + "title": "Strategy", + "type": "string" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "tasks": { + "$ref": "#/$defs/tasks" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "user": { + "title": "Remote User", + "type": "string" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "vars_files": { + "items": { + "type": "string" + }, + "title": "Vars Files", + "type": ["array", "string", "null"] + }, + "vars_prompt": { + "items": { + "$ref": "#/$defs/vars_prompt" + }, + "markdownDescription": "See [vars_prompt](https://docs.ansible.com/ansible/latest/user_guide/playbooks_prompts.html)", + "title": "vars_prompt", + "type": "array" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": ["hosts"], + "title": "play", + "type": "object" + }, + "play-role": { + "markdownDescription": "See [roles](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#roles)", + "properties": { + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "role": { + "title": "Role", + "type": "string" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": ["role"], + "title": "play-role", + "type": "object" + }, + "playbook": { + "examples": ["playbooks/*.yml", "playbooks/*.yaml"], + "items": { + "oneOf": [ + { + "$ref": "#/$defs/ansible.builtin.import_playbook" + }, + { + "$ref": "#/$defs/play" + } + ] + }, + "title": "Ansible Playbook", + "type": "array" + }, + "tags": { + "anyOf": [ + { 
+ "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Tags" + }, + "task": { + "additionalProperties": true, + "allOf": [ + { + "not": { + "required": ["hosts"] + } + }, + { + "not": { + "required": ["tasks"] + } + }, + { + "not": { + "required": ["import_playbook"] + } + }, + { + "not": { + "required": ["block"] + } + } + ], + "properties": { + "action": { + "title": "Action", + "type": "string" + }, + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "args": { + "$ref": "#/$defs/templated-object", + "title": "Args" + }, + "async": { + "$ref": "#/$defs/templated-integer", + "title": "Async" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "changed_when": { + "$ref": "#/$defs/complex_conditional", + "markdownDescription": "See [changed_when](https://docs.ansible.com/ansible/latest/user_guide/playbooks_error_handling.html#defining-changed)", + "title": "Changed When" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delay": { + "$ref": "#/$defs/templated-integer", + "title": "Delay" + }, + "delegate_facts": { + "title": "Delegate Facts", + "type": "boolean" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "failed_when": { + "$ref": "#/$defs/complex_conditional", + "title": "Failed When" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "listen": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "markdownDescription": "Applies only to handlers. 
See [listen](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_handlers.html)", + "title": "Listen" + }, + "local_action": { + "title": "Local Action", + "type": ["string", "object"] + }, + "loop": { + "title": "Loop", + "type": ["string", "array"] + }, + "loop_control": { + "title": "Loop Control" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/no_log" + }, + "notify": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Notify" + }, + "poll": { + "$ref": "#/$defs/templated-integer", + "title": "Poll" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "register": { + "title": "Register", + "type": "string" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "retries": { + "$ref": "#/$defs/templated-integer", + "title": "Retries" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "until": { + "$ref": "#/$defs/complex_conditional", + "title": "Until" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + }, + "with_dict": { + "title": "With Dict" + }, + "with_fileglob": { + "title": "With Fileglob" + }, + "with_filetree": { + "title": "With Filetree" + }, + "with_first_found": { + "title": "With First Found" + }, + "with_indexed_items": { + "title": "With Indexed Items" + }, + "with_ini": { + "title": "With Ini" + }, + "with_inventory_hostnames": { + "title": "With Inventory Hostnames" + }, + "with_items": { + "anyOf": [ + { + "$ref": "#/$defs/full-jinja" + }, + { + "type": "array" + } + ], + "markdownDescription": "See [loops](https://docs.ansible.com/ansible/latest/user_guide/playbooks_loops.html#loops)", + "title": "With Items" + }, + "with_lines": { + "title": "With Lines" + }, + "with_random_choice": { + "title": "With Random Choice" + }, + "with_sequence": { + "title": "With Sequence" + }, + "with_subelements": { + "title": "With Subelements" + }, + "with_together": { + "title": "With Together" + } + }, + "title": "task", + "type": "object" + }, + "tasks": { + "$schema": "http://json-schema.org/draft-07/schema", + "examples": ["tasks/*.yml", "handlers/*.yml"], + "items": { + "anyOf": [ + { + "$ref": "#/$defs/block" + }, + { + "$ref": "#/$defs/task" + } + ] + }, + "title": "Ansible Tasks Schema", + "type": ["array", "null"] + }, + "templated-boolean": { + "oneOf": [ + { + "type": "boolean" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-integer": { + "oneOf": [ + { + "type": "integer" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-integer-or-percent": { + "oneOf": [ + { + "type": "integer" + }, + { + "pattern": "^\\d+\\.?\\d*%?$", + "type": "string" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-object": { + "oneOf": [ + { + "type": "object" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "vars_prompt": { + "additionalProperties": false, + "properties": { + "confirm": { + "title": "Confirm", + "type": "boolean" + }, + "default": { + "title": "Default", + "type": "string" + }, 
+ "encrypt": { + "enum": [ + "des_crypt", + "bsdi_crypt", + "bigcrypt", + "crypt16", + "md5_crypt", + "bcrypt", + "sha1_crypt", + "sun_md5_crypt", + "sha256_crypt", + "sha512_crypt", + "apr_md5_crypt", + "phpass", + "pbkdf2_digest", + "cta_pbkdf2_sha1", + "dlitz_pbkdf2_sha1", + "scram", + "bsd_nthash" + ], + "title": "Encrypt", + "type": "string" + }, + "name": { + "title": "Name", + "type": "string" + }, + "private": { + "default": true, + "title": "Private", + "type": "boolean" + }, + "prompt": { + "title": "Prompt", + "type": "string" + }, + "salt_size": { + "default": 8, + "title": "Salt Size", + "type": "integer" + }, + "unsafe": { + "default": false, + "markdownDescription": "See [unsafe](https://docs.ansible.com/ansible/latest/user_guide/playbooks_prompts.html#allowing-special-characters-in-vars-prompt-values)", + "title": "Unsafe", + "type": "boolean" + } + }, + "required": ["name", "prompt"], + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible/ansiblelint/main/ansiblelint/schemas/ansible.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": [], + "title": "Ansible Schemas Bundle 22.4", + "type": ["array", "object"] +} diff --git a/src/ansiblelint/schemas/arg_specs.json b/src/ansiblelint/schemas/arg_specs.json new file mode 100644 index 0000000..e5db072 --- /dev/null +++ b/src/ansiblelint/schemas/arg_specs.json @@ -0,0 +1,250 @@ +{ + "$defs": { + "datatype": { + "enum": [ + "str", + "list", + "dict", + "bool", + "int", + "float", + "path", + "raw", + "jsonarg", + "json", + "bytes", + "bits" + ], + "type": "string" + }, + "deprecated_alias": { + "properties": { + "collection_name": { + "type": "string" + }, + "date": { + "type": "string" + }, + "name": { + "type": "string" + }, + "version": { + "type": "string" + } + }, + "required": ["name"], + "type": "object" + }, + "entry_point": { + "additionalProperties": false, + "properties": { + "author": { + "oneOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "description": { + "oneOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "options": { + "additionalProperties": { + "$ref": "#/$defs/option" + }, + "type": "object" + }, + "seealso": { + "items": { + "oneOf": [ + { + "additionalProperties": false, + "properties": { + "description": { + "type": "string" + }, + "module": { + "type": "string" + } + }, + "required": ["module"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "description": { + "type": "string" + }, + "plugin": { + "type": "string" + }, + "plugin_type": { + "type": "string" + } + }, + "required": ["plugin", "plugin_type"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "description": { + "type": "string" + }, + "ref": { + "type": "string" + } + }, + "required": ["description", "ref"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "description": { + "type": "string" + }, + "link": { + "type": "string" + }, + "name": { + "type": "string" + } + }, + "required": ["description", "link", "name"], + "type": "object" + } + ] + }, + "type": "array" + }, + "short_description": { + "type": "string" + }, + "version_added": { + "type": "string" + } + }, + "required": ["options"], + "title": "Entry Point", + "type": "object" + }, + "option": { + "additionalProperties": false, + "aliases": { + "items": { + "type": "string" + }, + "type": 
"array" + }, + "apply_defaults": { + "type": "string" + }, + "deprecated_aliases": { + "items": { + "$ref": "#/$defs/deprecated_alias" + }, + "type": "array" + }, + "markdownDescription": "xxx", + "options": { + "$ref": "#/$defs/option" + }, + "properties": { + "choices": { + "type": "array" + }, + "default": { + "default": "None" + }, + "description": { + "description": "Detailed explanation of what this option does. It should be written in full sentences.", + "oneOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "elements": { + "$ref": "#/$defs/datatype" + }, + "fallback": { + "default": "None", + "type": "string" + }, + "no_log": { + "default": false, + "type": "boolean" + }, + "option-name": { + "description": "The name of the option/argument.", + "type": "string" + }, + "options": { + "additionalProperties": { + "$ref": "#/$defs/option" + }, + "type": "object" + }, + "required": { + "default": false, + "type": "boolean" + }, + "type": { + "$ref": "#/$defs/datatype", + "markdownDescription": "See [argument-spec](https://docs.ansible.com/ansible/latest/dev_guide/developing_program_flow_modules.html#argument-spec" + }, + "version_added": { + "type": "string" + } + }, + "removed_at_date": { + "type": "string" + }, + "removed_from_collection": { + "type": "string" + }, + "removed_in_version": { + "type": "string" + }, + "title": "Option" + } + }, + "$id": "https://raw.githubusercontent.com/ansible/ansiblelint/main/src/ansiblelint/schemas/ansible-argument-specs.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": ["meta/argument_specs.yml"], + "markdownDescription": "Add entry point, usually `main`.\nSee [role-argument-validation](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#role-argument-validation)", + "properties": { + "argument_specs": { + "additionalProperties": { + "$ref": "#/$defs/entry_point" + }, + "markdownDescription": "Add entry point, usually `main`.\nSee [role-argument-validation](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#role-argument-validation)" + } + }, + "title": "Ansible Argument Specs Schema" +} diff --git a/src/ansiblelint/schemas/changelog.json b/src/ansiblelint/schemas/changelog.json new file mode 100644 index 0000000..ec0d896 --- /dev/null +++ b/src/ansiblelint/schemas/changelog.json @@ -0,0 +1,262 @@ +{ + "$defs": { + "plugin-descriptions": { + "items": { + "properties": { + "description": { + "markdownDescription": "Value of `short_description` from plugin `DOCUMENTATION`.", + "title": "Description", + "type": "string" + }, + "name": { + "markdownDescription": "It must not be the FQCN, but the name inside the collection.", + "pattern": "[a-zA-Z0-9_]+", + "title": "Name", + "type": "string" + }, + "namespace": { + "type": "null" + } + }, + "type": "object" + }, + "type": "array" + }, + "release": { + "additionalProperties": false, + "properties": { + "changes": { + "additionalProperties": false, + "properties": { + "breaking_changes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "bugfixes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "deprecated_features": { + "items": { + "type": "string" + }, + "type": "array" + }, + "known_issues": { + "items": { + "type": "string" + }, + "type": "array" + }, + "major_changes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "minor_changes": { + "items": { + "type": "string" + }, + "type": 
"array" + }, + "release_summary": { + "markdownDescription": "This must be valid [reStructuredText](https://en.wikipedia.org/wiki/ReStructuredText).", + "title": "Release Summary", + "type": "string" + }, + "removed_features": { + "items": { + "type": "string" + }, + "type": "array" + }, + "security_fixes": { + "items": { + "type": "string" + }, + "type": "array" + }, + "trivial": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "codename": { + "type": "string" + }, + "fragments": { + "items": { + "type": "string" + }, + "markdownDescription": "List of strings representing filenames of changelog framents.", + "type": "array" + }, + "modules": { + "items": { + "properties": { + "description": { + "markdownDescription": "Value of `short_description` from plugin `DOCUMENTATION`.", + "title": "Description", + "type": "string" + }, + "name": { + "markdownDescription": "It must not be the FQCN, but the name inside the collection.", + "pattern": "[a-zA-Z0-9_]+", + "title": "Short module name", + "type": "string" + }, + "namespace": { + "markdownDescription": "Must be `''` for modules directly in `plugins/modules/`, or the dot-separated list of directories the module is in inside the `plugins/modules/` directory. The namespace is used to group new modules by their namespace inside the collection.", + "title": "Namespace", + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "objects": { + "additionalProperties": false, + "properties": { + "playbook": { + "items": { + "properties": { + "description": { + "markdownDescription": "A short description of what the playbook does.", + "title": "Description", + "type": "string" + }, + "name": { + "markdownDescription": "It must not be the FQCN, but the name inside the collection.", + "pattern": "[a-zA-Z0-9_]+", + "title": "Short playbook name", + "type": "string" + }, + "namespace": { + "type": "null" + } + }, + "type": "object" + }, + "type": "array" + }, + "role": { + "items": { + "properties": { + "description": { + "markdownDescription": "Value of `short_description` from role's argument spec.", + "title": "Description", + "type": "string" + }, + "name": { + "markdownDescription": "It must not be the FQCN, but the name inside the collection.", + "pattern": "[a-zA-Z0-9_]+", + "title": "Short role name", + "type": "string" + }, + "namespace": { + "type": "null" + } + }, + "type": "object" + }, + "type": "array" + } + }, + "type": "object" + }, + "plugins": { + "additionalProperties": false, + "properties": { + "become": { + "$ref": "#/$defs/plugin-descriptions" + }, + "cache": { + "$ref": "#/$defs/plugin-descriptions" + }, + "callback": { + "$ref": "#/$defs/plugin-descriptions" + }, + "cliconf": { + "$ref": "#/$defs/plugin-descriptions" + }, + "connections": { + "$ref": "#/$defs/plugin-descriptions" + }, + "filter": { + "$ref": "#/$defs/plugin-descriptions" + }, + "httpapi": { + "$ref": "#/$defs/plugin-descriptions" + }, + "inventory": { + "$ref": "#/$defs/plugin-descriptions" + }, + "lookup": { + "$ref": "#/$defs/plugin-descriptions" + }, + "netconf": { + "$ref": "#/$defs/plugin-descriptions" + }, + "shell": { + "$ref": "#/$defs/plugin-descriptions" + }, + "strategy": { + "$ref": "#/$defs/plugin-descriptions" + }, + "test": { + "$ref": "#/$defs/plugin-descriptions" + }, + "vars": { + "$ref": "#/$defs/plugin-descriptions" + } + }, + "type": "object" + }, + "release_date": { + "format": "date", + "markdownDescription": "Use ISO-8601 date format, like 2020-12-31", + "pattern": 
"\\d\\d\\d\\d-\\d\\d-\\d\\d", + "title": "Date of the release.", + "type": "string" + } + }, + "type": "object" + }, + "semver": { + "pattern": "\\d+.\\d+.\\d+.*", + "title": "Version string following SemVer specification.", + "type": ["string", "null"] + } + }, + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/changelog.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": ["changelogs/changelog.yaml"], + "markdownDescription": "Antsibull Changelog Schema is based on [changelog.yaml-format.md](https://github.com/ansible-community/antsibull-changelog/blob/main/docs/changelog.yaml-format.md).", + "properties": { + "ancestor": { + "$ref": "#/$defs/semver" + }, + "releases": { + "patternProperties": { + "\\d+.\\d+.\\d+.*": { + "$ref": "#/$defs/release", + "type": "object" + } + }, + "type": "object" + } + }, + "title": "Antsibull Changelog Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/execution-environment.json b/src/ansiblelint/schemas/execution-environment.json new file mode 100644 index 0000000..405019d --- /dev/null +++ b/src/ansiblelint/schemas/execution-environment.json @@ -0,0 +1,70 @@ +{ + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/ansible-ee.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "description": "See https://docs.ansible.com/automation-controller/latest/html/userguide/ee_reference.html", + "examples": ["execution-environment.yml"], + "properties": { + "additional_build_steps": { + "properties": { + "append": { + "examples": ["RUN cat /etc/os-release"], + "type": ["string", "array"] + }, + "prepend": { + "examples": ["RUN cat /etc/os-release"], + "type": ["string", "array"] + } + }, + "title": "Commands to append or prepend to container build process.", + "type": "object" + }, + "ansible_config": { + "examples": ["ansible.cfg"], + "title": "Ansible configuration file", + "type": "string" + }, + "build_arg_defaults": { + "additionalProperties": true, + "properties": { + "EE_BASE_IMAGE": { + "type": "string" + } + }, + "type": "object" + }, + "dependencies": { + "description": "Allows adding system, python or galaxy dependencies.", + "properties": { + "galaxy": { + "examples": ["requirements.yml"], + "markdownDescription": "Example `requirements.yml`", + "title": "Optional galaxy file", + "type": "string" + }, + "python": { + "examples": ["requirements.txt"], + "markdownDescription": "Example `requirements.txt`", + "title": "Optional python package dependencies", + "type": "string" + }, + "system": { + "examples": ["bindep.txt"], + "markdownDescription": "Example `bindep.txt`", + "title": "Optional system dependencies using bindep format", + "type": "string" + } + }, + "title": "Dependencies", + "type": "object" + }, + "version": { + "enum": [1], + "title": "Version", + "type": "integer" + } + }, + "required": ["version", "dependencies"], + "title": "Ansible Execution Environment Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/galaxy.json b/src/ansiblelint/schemas/galaxy.json new file mode 100644 index 0000000..1ff2aa4 --- /dev/null +++ b/src/ansiblelint/schemas/galaxy.json @@ -0,0 +1,549 @@ +{ + "$defs": { + "CollectionVersionConstraintModel": { + "additionalProperties": false, + "title": "CollectionVersionConstraintModel", + "type": "string" + }, + "SPDXLicense": { + "$ref": "#/$defs/SPDXLicenseEnum", + "title": "SPDXLicense" + }, + "SPDXLicenseEnum": { + 
"description": "An enumeration.", + "enum": [ + "0BSD", + "AAL", + "ADSL", + "AFL-1.1", + "AFL-1.2", + "AFL-2.0", + "AFL-2.1", + "AFL-3.0", + "AGPL-1.0-only", + "AGPL-1.0-or-later", + "AGPL-3.0-only", + "AGPL-3.0-or-later", + "AMDPLPA", + "AML", + "AMPAS", + "ANTLR-PD", + "ANTLR-PD-fallback", + "APAFML", + "APL-1.0", + "APSL-1.0", + "APSL-1.1", + "APSL-1.2", + "APSL-2.0", + "Abstyles", + "Adobe-2006", + "Adobe-Glyph", + "Afmparse", + "Aladdin", + "Apache-1.0", + "Apache-1.1", + "Apache-2.0", + "Artistic-1.0", + "Artistic-1.0-Perl", + "Artistic-1.0-cl8", + "Artistic-2.0", + "BSD-1-Clause", + "BSD-2-Clause", + "BSD-2-Clause-Patent", + "BSD-2-Clause-Views", + "BSD-3-Clause", + "BSD-3-Clause-Attribution", + "BSD-3-Clause-Clear", + "BSD-3-Clause-LBNL", + "BSD-3-Clause-Modification", + "BSD-3-Clause-No-Military-License", + "BSD-3-Clause-No-Nuclear-License", + "BSD-3-Clause-No-Nuclear-License-2014", + "BSD-3-Clause-No-Nuclear-Warranty", + "BSD-3-Clause-Open-MPI", + "BSD-4-Clause", + "BSD-4-Clause-Shortened", + "BSD-4-Clause-UC", + "BSD-Protection", + "BSD-Source-Code", + "BSL-1.0", + "BUSL-1.1", + "Bahyph", + "Barr", + "Beerware", + "BitTorrent-1.0", + "BitTorrent-1.1", + "BlueOak-1.0.0", + "Borceux", + "C-UDA-1.0", + "CAL-1.0", + "CAL-1.0-Combined-Work-Exception", + "CATOSL-1.1", + "CC-BY-1.0", + "CC-BY-2.0", + "CC-BY-2.5", + "CC-BY-3.0", + "CC-BY-3.0-AT", + "CC-BY-3.0-US", + "CC-BY-4.0", + "CC-BY-NC-1.0", + "CC-BY-NC-2.0", + "CC-BY-NC-2.5", + "CC-BY-NC-3.0", + "CC-BY-NC-4.0", + "CC-BY-NC-ND-1.0", + "CC-BY-NC-ND-2.0", + "CC-BY-NC-ND-2.5", + "CC-BY-NC-ND-3.0", + "CC-BY-NC-ND-3.0-IGO", + "CC-BY-NC-ND-4.0", + "CC-BY-NC-SA-1.0", + "CC-BY-NC-SA-2.0", + "CC-BY-NC-SA-2.5", + "CC-BY-NC-SA-3.0", + "CC-BY-NC-SA-4.0", + "CC-BY-ND-1.0", + "CC-BY-ND-2.0", + "CC-BY-ND-2.5", + "CC-BY-ND-3.0", + "CC-BY-ND-4.0", + "CC-BY-SA-1.0", + "CC-BY-SA-2.0", + "CC-BY-SA-2.0-UK", + "CC-BY-SA-2.1-JP", + "CC-BY-SA-2.5", + "CC-BY-SA-3.0", + "CC-BY-SA-3.0-AT", + "CC-BY-SA-4.0", + "CC-PDDC", + "CC0-1.0", + "CDDL-1.0", + "CDDL-1.1", + "CDL-1.0", + "CDLA-Permissive-1.0", + "CDLA-Sharing-1.0", + "CECILL-1.0", + "CECILL-1.1", + "CECILL-2.0", + "CECILL-2.1", + "CECILL-B", + "CECILL-C", + "CERN-OHL-1.1", + "CERN-OHL-1.2", + "CERN-OHL-P-2.0", + "CERN-OHL-S-2.0", + "CERN-OHL-W-2.0", + "CNRI-Jython", + "CNRI-Python", + "CNRI-Python-GPL-Compatible", + "CPAL-1.0", + "CPL-1.0", + "CPOL-1.02", + "CUA-OPL-1.0", + "Caldera", + "ClArtistic", + "Condor-1.1", + "Crossword", + "CrystalStacker", + "Cube", + "D-FSL-1.0", + "DOC", + "DRL-1.0", + "DSDP", + "Dotseqn", + "ECL-1.0", + "ECL-2.0", + "EFL-1.0", + "EFL-2.0", + "EPICS", + "EPL-1.0", + "EPL-2.0", + "EUDatagrid", + "EUPL-1.0", + "EUPL-1.1", + "EUPL-1.2", + "Entessa", + "ErlPL-1.1", + "Eurosym", + "FSFAP", + "FSFUL", + "FSFULLR", + "FTL", + "Fair", + "Frameworx-1.0", + "FreeBSD-DOC", + "FreeImage", + "GD", + "GFDL-1.1-invariants-only", + "GFDL-1.1-invariants-or-later", + "GFDL-1.1-no-invariants-only", + "GFDL-1.1-no-invariants-or-later", + "GFDL-1.1-only", + "GFDL-1.1-or-later", + "GFDL-1.2-invariants-only", + "GFDL-1.2-invariants-or-later", + "GFDL-1.2-no-invariants-only", + "GFDL-1.2-no-invariants-or-later", + "GFDL-1.2-only", + "GFDL-1.2-or-later", + "GFDL-1.3-invariants-only", + "GFDL-1.3-invariants-or-later", + "GFDL-1.3-no-invariants-only", + "GFDL-1.3-no-invariants-or-later", + "GFDL-1.3-only", + "GFDL-1.3-or-later", + "GL2PS", + "GLWTPL", + "GPL-1.0-only", + "GPL-1.0-or-later", + "GPL-2.0-only", + "GPL-2.0-or-later", + "GPL-3.0-only", + "GPL-3.0-or-later", + "Giftware", + "Glide", + 
"Glulxe", + "HPND", + "HPND-sell-variant", + "HTMLTIDY", + "HaskellReport", + "Hippocratic-2.1", + "IBM-pibs", + "ICU", + "IJG", + "IPA", + "IPL-1.0", + "ISC", + "ImageMagick", + "Imlib2", + "Info-ZIP", + "Intel", + "Intel-ACPI", + "Interbase-1.0", + "JPNIC", + "JSON", + "JasPer-2.0", + "LAL-1.2", + "LAL-1.3", + "LGPL-2.0-only", + "LGPL-2.0-or-later", + "LGPL-2.1-only", + "LGPL-2.1-or-later", + "LGPL-3.0-only", + "LGPL-3.0-or-later", + "LGPLLR", + "LPL-1.0", + "LPL-1.02", + "LPPL-1.0", + "LPPL-1.1", + "LPPL-1.2", + "LPPL-1.3a", + "LPPL-1.3c", + "Latex2e", + "Leptonica", + "LiLiQ-P-1.1", + "LiLiQ-R-1.1", + "LiLiQ-Rplus-1.1", + "Libpng", + "Linux-OpenIB", + "MIT", + "MIT-0", + "MIT-CMU", + "MIT-Modern-Variant", + "MIT-advertising", + "MIT-enna", + "MIT-feh", + "MIT-open-group", + "MITNFA", + "MPL-1.0", + "MPL-1.1", + "MPL-2.0", + "MPL-2.0-no-copyleft-exception", + "MS-PL", + "MS-RL", + "MTLL", + "MakeIndex", + "MirOS", + "Motosoto", + "MulanPSL-1.0", + "MulanPSL-2.0", + "Multics", + "Mup", + "NAIST-2003", + "NASA-1.3", + "NBPL-1.0", + "NCGL-UK-2.0", + "NCSA", + "NGPL", + "NIST-PD", + "NIST-PD-fallback", + "NLOD-1.0", + "NLPL", + "NOSL", + "NPL-1.0", + "NPL-1.1", + "NPOSL-3.0", + "NRL", + "NTP", + "NTP-0", + "Naumen", + "Net-SNMP", + "NetCDF", + "Newsletr", + "Nokia", + "Noweb", + "O-UDA-1.0", + "OCCT-PL", + "OCLC-2.0", + "ODC-By-1.0", + "ODbL-1.0", + "OFL-1.0", + "OFL-1.0-RFN", + "OFL-1.0-no-RFN", + "OFL-1.1", + "OFL-1.1-RFN", + "OFL-1.1-no-RFN", + "OGC-1.0", + "OGDL-Taiwan-1.0", + "OGL-Canada-2.0", + "OGL-UK-1.0", + "OGL-UK-2.0", + "OGL-UK-3.0", + "OGTSL", + "OLDAP-1.1", + "OLDAP-1.2", + "OLDAP-1.3", + "OLDAP-1.4", + "OLDAP-2.0", + "OLDAP-2.0.1", + "OLDAP-2.1", + "OLDAP-2.2", + "OLDAP-2.2.1", + "OLDAP-2.2.2", + "OLDAP-2.3", + "OLDAP-2.4", + "OLDAP-2.5", + "OLDAP-2.6", + "OLDAP-2.7", + "OLDAP-2.8", + "OML", + "OPL-1.0", + "OSET-PL-2.1", + "OSL-1.0", + "OSL-1.1", + "OSL-2.0", + "OSL-2.1", + "OSL-3.0", + "OpenSSL", + "PDDL-1.0", + "PHP-3.0", + "PHP-3.01", + "PSF-2.0", + "Parity-6.0.0", + "Parity-7.0.0", + "Plexus", + "PolyForm-Noncommercial-1.0.0", + "PolyForm-Small-Business-1.0.0", + "PostgreSQL", + "Python-2.0", + "QPL-1.0", + "Qhull", + "RHeCos-1.1", + "RPL-1.1", + "RPL-1.5", + "RPSL-1.0", + "RSA-MD", + "RSCPL", + "Rdisc", + "Ruby", + "SAX-PD", + "SCEA", + "SGI-B-1.0", + "SGI-B-1.1", + "SGI-B-2.0", + "SHL-0.5", + "SHL-0.51", + "SISSL", + "SISSL-1.2", + "SMLNJ", + "SMPPL", + "SNIA", + "SPL-1.0", + "SSH-OpenSSH", + "SSH-short", + "SSPL-1.0", + "SWL", + "Saxpath", + "Sendmail", + "Sendmail-8.23", + "SimPL-2.0", + "Sleepycat", + "Spencer-86", + "Spencer-94", + "Spencer-99", + "SugarCRM-1.1.3", + "TAPR-OHL-1.0", + "TCL", + "TCP-wrappers", + "TMate", + "TORQUE-1.1", + "TOSL", + "TU-Berlin-1.0", + "TU-Berlin-2.0", + "UCL-1.0", + "UPL-1.0", + "Unicode-DFS-2015", + "Unicode-DFS-2016", + "Unicode-TOU", + "Unlicense", + "VOSTROM", + "VSL-1.0", + "Vim", + "W3C", + "W3C-19980720", + "W3C-20150513", + "WTFPL", + "Watcom-1.0", + "Wsuipa", + "X11", + "XFree86-1.1", + "XSkat", + "Xerox", + "Xnet", + "YPL-1.0", + "YPL-1.1", + "ZPL-1.1", + "ZPL-2.0", + "ZPL-2.1", + "Zed", + "Zend-2.0", + "Zimbra-1.3", + "Zimbra-1.4", + "Zlib", + "blessing", + "bzip2-1.0.5", + "bzip2-1.0.6", + "copyleft-next-0.3.0", + "copyleft-next-0.3.1", + "curl", + "diffmark", + "dvipdfm", + "eGenix", + "etalab-2.0", + "gSOAP-1.3b", + "gnuplot", + "iMatix", + "libpng-2.0", + "libselinux-1.0", + "libtiff", + "mpich2", + "psfrag", + "psutils", + "xinetd", + "xpp", + "zlib-acknowledgement" + ], + "title": "SPDXLicenseEnum" + } + }, + "$id": 
"https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/ansible-galaxy.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": ["galaxy.yml"], + "properties": { + "authors": { + "items": { + "type": "string" + }, + "title": "Authors", + "type": "array" + }, + "build_ignore": { + "items": { + "type": "string" + }, + "title": "Build Ignore", + "type": "array" + }, + "dependencies": { + "additionalProperties": { + "$ref": "#/$defs/CollectionVersionConstraintModel" + }, + "title": "Dependencies", + "type": "object" + }, + "description": { + "title": "Description", + "type": "string" + }, + "documentation": { + "title": "Documentation", + "type": "string" + }, + "homepage": { + "title": "Homepage", + "type": "string" + }, + "issues": { + "title": "Issues", + "type": "string" + }, + "license": { + "items": { + "$ref": "#/$defs/SPDXLicense" + }, + "title": "License", + "type": "array" + }, + "license_file": { + "title": "License File", + "type": "string" + }, + "name": { + "minLength": 2, + "pattern": "^[a-z][a-z0-9_]+$", + "title": "Name", + "type": "string" + }, + "namespace": { + "minLength": 2, + "pattern": "^[a-z][a-z0-9_]+$", + "title": "Namespace", + "type": "string" + }, + "readme": { + "markdownDescription": "The path to the Markdown (.md) readme file. This path is relative to the root of the collection.\nSee [metadata structure](https://docs.ansible.com/ansible/latest/dev_guide/collections_galaxy_meta.html)", + "title": "Readme", + "type": "string" + }, + "repository": { + "title": "Repository", + "type": "string" + }, + "tags": { + "items": { + "type": "string" + }, + "title": "Tags", + "type": "array" + }, + "version": { + "markdownDescription": "Version must use [SemVer](https://semver.org/) format, which is more restrictive than [PEP-440](https://peps.python.org/pep-0440/). 
For example `1.0.0-rc1` is valid but `1.0.0rc` is not.", + "minLength": 5, + "pattern": "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$", + "title": "Version", + "type": "string" + } + }, + "required": [ + "namespace", + "name", + "version", + "readme", + "authors", + "description", + "repository" + ], + "title": "Ansible galaxy.yml Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/inventory.json b/src/ansiblelint/schemas/inventory.json new file mode 100644 index 0000000..2c665e2 --- /dev/null +++ b/src/ansiblelint/schemas/inventory.json @@ -0,0 +1,66 @@ +{ + "$defs": { + "group": { + "properties": { + "children": { + "patternProperties": { + "[a-zA-Z-_0-9]": { + "$ref": "#/$defs/group" + } + } + }, + "hosts": { + "patternProperties": { + "[a-zA-Z.-_0-9]": { + "type": ["object", "null"] + } + }, + "type": ["object", "string"] + }, + "vars": { + "type": "object" + } + }, + "type": ["object", "null"] + }, + "special-group": { + "additionalProperties": false, + "properties": { + "children": { + "type": ["object", "null"] + }, + "groups": { + "type": ["object", "null"] + }, + "hosts": { + "type": ["object", "null"] + }, + "vars": { + "type": ["object", "null"] + } + }, + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/ansible-inventory.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": true, + "description": "Ansible Inventory Schema", + "examples": [ + "inventory.yaml", + "inventory.yml", + "inventory/*.yml", + "inventory/*.yaml" + ], + "markdownDescription": "All keys at top levels are groups with `all` and `ungrouped` having a special meaning.\n\nSee [How to build your inventory](https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html)", + "properties": { + "all": { + "$ref": "#/$defs/special-group" + }, + "ungrouped": { + "$ref": "#/$defs/group" + } + }, + "title": "Ansible Inventory Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/main.py b/src/ansiblelint/schemas/main.py new file mode 100644 index 0000000..5a96ce9 --- /dev/null +++ b/src/ansiblelint/schemas/main.py @@ -0,0 +1,141 @@ +"""Module containing cached JSON schemas.""" +from __future__ import annotations + +import json +import logging +import os +import time +import urllib.request +from collections import defaultdict +from functools import lru_cache +from pathlib import Path +from typing import Any +from urllib.request import Request + +import jsonschema +import yaml +from jsonschema.exceptions import ValidationError + +from ansiblelint.file_utils import Lintable +from ansiblelint.loaders import yaml_load_safe + +_logger = logging.getLogger(__package__) + + +class SchemaCacheDict(defaultdict): # type: ignore + """Caching schema store.""" + + def __missing__(self, key: str) -> Any: + """Load schema on its first use.""" + value = get_schema(key) + self[key] = value + return value + + +_schema_cache = SchemaCacheDict() + + +# Maps kinds to JSON schemas +# See https://www.schemastore.org/json/ +store_file = Path(f"{__file__}/../__store__.json").resolve() +with open(store_file, encoding="utf-8") as json_file: + JSON_SCHEMAS = json.load(json_file) + + +@lru_cache(maxsize=None) +def get_schema(kind: str) -> Any: + """Return the schema for the given kind.""" + schema_file = os.path.dirname(__file__) + "/" + kind + ".json" + with 
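The `version` pattern in galaxy.json above is the canonical SemVer regular expression, which is why `1.0.0-rc1` is accepted while `1.0.0rc` is not. A quick sketch exercising the same pattern (copied from the schema, with JSON escaping removed):

"""Illustrate the galaxy.yml version pattern quoted above (SemVer)."""
import re

# The SemVer pattern from galaxy.json, unescaped for Python.
SEMVER = re.compile(
    r"^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)"
    r"(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
    r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
    r"(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$"
)

for candidate in ("1.0.0", "1.0.0-rc1", "1.0.0+build.7", "1.0.0rc", "1.0"):
    verdict = "valid" if SEMVER.match(candidate) else "rejected"
    print(f"{candidate}: {verdict}")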
open(schema_file, encoding="utf-8") as f: + return json.load(f) + + +def validate_file_schema(file: Lintable) -> list[str]: + """Return list of JSON validation errors found.""" + if file.kind not in JSON_SCHEMAS: + return [f"Unable to find JSON Schema '{file.kind}' for '{file.path}' file."] + try: + # convert yaml to json (keys are converted to strings) + yaml_data = yaml_load_safe(file.content) + json_data = json.loads(json.dumps(yaml_data)) + # file.data = json_data + jsonschema.validate( + instance=json_data, + schema=_schema_cache[file.kind], + ) + except yaml.constructor.ConstructorError as exc: + return [f"Failed to load YAML file '{file.path}': {exc.problem}"] + except ValidationError as exc: + return [exc.message] + return [] + + +# pylint: disable=too-many-branches +def refresh_schemas(min_age_seconds: int = 3600 * 24) -> int: + """Refresh JSON schemas by downloading latest versions. + + Returns number of changed schemas. + """ + age = int(time.time() - store_file.stat().st_mtime) + + # never check for updated schemas more than once a day + if min_age_seconds > age: + return 0 + if not os.access(store_file, os.W_OK): # pragma: no cover + _logger.debug( + "Skipping schema update due to lack of writing rights on %s", store_file + ) + return -1 + _logger.debug("Checking for updated schemas...") + + changed = 0 + for kind, data in JSON_SCHEMAS.items(): + url = data["url"] + if "#" in url: + raise RuntimeError( + f"Schema URLs cannot contain # due to python-jsonschema limitation: {url}" + ) + path = Path(f"{os.path.relpath(os.path.dirname(__file__))}/{kind}.json") + _logger.debug("Refreshing %s schema ...", kind) + request = Request(url) + etag = data.get("etag", "") + if etag: + request.add_header("If-None-Match", f'"{data.get("etag")}"') + try: + with urllib.request.urlopen(request, timeout=10) as response: + if response.status == 200: + content = response.read().decode("utf-8").rstrip() + etag = response.headers["etag"].strip('"') + if etag != data.get("etag", ""): + JSON_SCHEMAS[kind]["etag"] = etag + changed += 1 + with open(f"{path}", "w", encoding="utf-8") as f_out: + _logger.info("Schema %s was updated", kind) + f_out.write(content) + f_out.write("\n") # prettier/editors + f_out.truncate() + os.fsync(f_out.fileno()) + # unload possibly loaded schema + if kind in _schema_cache: # pragma: no cover + del _schema_cache[kind] + except (ConnectionError, OSError) as exc: + if ( + isinstance(exc, urllib.error.HTTPError) + and getattr(exc, "code", None) == 304 + ): + _logger.debug("Schema %s is not modified", url) + continue + # In case of networking issues, we just stop and use last-known good + _logger.debug("Skipped schema refresh due to unexpected exception: %s", exc) + break + if changed: # pragma: no cover + with open(store_file, "w", encoding="utf-8") as f_out: + # formatting should match our .prettierrc.yaml + json.dump(JSON_SCHEMAS, f_out, indent=2, sort_keys=True) + f_out.write("\n") # prettier and editors in general + # clear schema cache + get_schema.cache_clear() + else: + store_file.touch() + changed = 1 + return changed diff --git a/src/ansiblelint/schemas/meta-runtime.json b/src/ansiblelint/schemas/meta-runtime.json new file mode 100644 index 0000000..e2d7aa8 --- /dev/null +++ b/src/ansiblelint/schemas/meta-runtime.json @@ -0,0 +1,82 @@ +{ + "$defs": { + "ActionGroup": { + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "$ref": "#/$defs/Metadata" + } + ] + }, + "type": "array" + }, + "Metadata": { + "properties": { + "metadata": { + "properties": { + 
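The `validate_file_schema()` helper above can also be driven on its own. A usage sketch, assuming ansible-lint is importable and a `galaxy.yml` exists in the working directory; it relies on `Lintable` deriving the schema kind from the file name:

"""Drive the cached-schema validation above directly.

Usage sketch: assumes ansible-lint is importable and that a galaxy.yml
exists in the working directory; Lintable derives the kind from the name.
"""
from ansiblelint.file_utils import Lintable
from ansiblelint.schemas.main import validate_file_schema

lintable = Lintable("galaxy.yml")        # kind inferred from the file name
errors = validate_file_schema(lintable)  # empty list means the file matched

for message in errors:
    print(f"schema violation: {message}")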
"extend_group": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + } + }, + "type": "object" + }, + "Redirect": { + "properties": { + "redirect": { + "type": "string" + } + }, + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/ansible-meta-runtime.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "description": "See https://docs.ansible.com/ansible/devel/dev_guide/developing_collections_structure.html#meta-directory", + "examples": ["**/meta/runtime.yml"], + "properties": { + "action_groups": { + "additionalProperties": { + "$ref": "#/$defs/ActionGroup" + }, + "description": "A mapping of groups and the list of action plugin and module names they contain. They may also have a special ‘metadata’ dictionary in the list, which can be used to include actions from other groups.", + "title": "Action Groups", + "type": "object" + }, + "import_redirection": { + "additionalProperties": { + "$ref": "#/$defs/Redirect" + }, + "description": "A mapping of names for Python import statements and their redirected locations.", + "title": "Import Redirection", + "type": "object" + }, + "plugin_routing": { + "markdownDescription": "Content in a collection that Ansible needs to load from another location or that has been deprecated/removed. The top level keys of plugin_routing are types of plugins, with individual plugin names as subkeys. To define a new location for a plugin, set the redirect field to another name. To deprecate a plugin, use the deprecation field to provide a custom warning message and the removal version or date. If the plugin has been renamed or moved to a new location, the redirect field should also be provided. If a plugin is being removed entirely, tombstone can be used for the fatal error message and removal version or date.", + "properties": { + "inventory": {}, + "module_utils": {}, + "modules": {} + }, + "title": "Plugin Routing", + "type": "object" + }, + "requires_ansible": { + "examples": [">=2.10,<2.11"], + "pattern": "^[^\\s]*$", + "title": "The version of Ansible Core (ansible-core) required to use the collection. 
Multiple versions can be separated with a comma.", + "type": "string" + } + }, + "title": "Ansible Meta Runtime Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/meta.json b/src/ansiblelint/schemas/meta.json new file mode 100644 index 0000000..41cf21a --- /dev/null +++ b/src/ansiblelint/schemas/meta.json @@ -0,0 +1,1381 @@ +{ + "$defs": { + "AIXPlatformModel": { + "properties": { + "name": { + "const": "AIX", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["6.1", "7.1", "7.2", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "AIXPlatformModel", + "type": "object" + }, + "AlpinePlatformModel": { + "properties": { + "name": { + "const": "Alpine", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "AlpinePlatformModel", + "type": "object" + }, + "AmazonPlatformModel": { + "properties": { + "name": { + "const": "Amazon", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "2013.03", + "2013.09", + "2014.03", + "2014.09", + "2015.03", + "2015.09", + "2016.03", + "2016.09", + "2017.03", + "2017.09", + "2017.12", + "2018.03", + "Candidate", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "AmazonPlatformModel", + "type": "object" + }, + "Amazon_Linux_2PlatformModel": { + "properties": { + "name": { + "const": "Amazon Linux 2", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "Amazon Linux 2PlatformModel", + "type": "object" + }, + "ArchLinuxPlatformModel": { + "properties": { + "name": { + "const": "ArchLinux", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "ArchLinuxPlatformModel", + "type": "object" + }, + "ClearLinuxPlatformModel": { + "properties": { + "name": { + "const": "ClearLinux", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "ClearLinuxPlatformModel", + "type": "object" + }, + "CumulusPlatformModel": { + "properties": { + "name": { + "const": "Cumulus", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["2.5", "3.0", "3.1", "3.2", "3.3", "3.4", "3.5", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "CumulusPlatformModel", + "type": "object" + }, + "DebianPlatformModel": { + "properties": { + "name": { + "const": "Debian", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "bookworm", + "bullseye", + "buster", + "etch", + "jessie", + "lenny", + "sid", + "squeeze", + "stretch", + "wheezy", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "DebianPlatformModel", + "type": "object" + }, + "DellOSPlatformModel": { + "properties": { + "name": { + "const": "DellOS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["10", "6", "9", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "DellOSPlatformModel", + "type": "object" + }, + "DependencyModel": { + "additionalProperties": true, + "anyOf": [ + { + "required": 
["role"] + }, + { + "required": ["src"] + }, + { + "required": ["name"] + } + ], + "markdownDescription": "See https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_reuse_roles.html#role-dependencies and https://github.com/ansible/ansible/blob/devel/lib/ansible/playbook/role/metadata.py#L79\n\nOther keys are treated as role [parameters](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#passing-different-parameters).", + "properties": { + "become": { + "title": "Become", + "type": "boolean" + }, + "name": { + "title": "Name", + "type": "string" + }, + "role": { + "title": "Role", + "type": "string" + }, + "scm": { + "enum": ["hg", "git"], + "title": "Scm", + "type": "string" + }, + "src": { + "title": "Src", + "type": "string" + }, + "tags": { + "items": { + "type": "string" + }, + "title": "Tags", + "type": ["array", "string"] + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "version": { + "title": "Version", + "type": "string" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "title": "Dependency entry", + "type": "object" + }, + "DevuanPlatformModel": { + "properties": { + "name": { + "const": "Devuan", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["ascii", "beowulf", "ceres", "jessie", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "DevuanPlatformModel", + "type": "object" + }, + "DragonFlyBSDPlatformModel": { + "properties": { + "name": { + "const": "DragonFlyBSD", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["5.2", "5.4", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "DragonFlyBSDPlatformModel", + "type": "object" + }, + "ELPlatformModel": { + "properties": { + "name": { + "const": "EL", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["5", "6", "7", "8", "9", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "ELPlatformModel", + "type": "object" + }, + "FedoraPlatformModel": { + "properties": { + "name": { + "const": "Fedora", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "16", + "17", + "18", + "19", + "20", + "21", + "22", + "23", + "24", + "25", + "26", + "27", + "28", + "29", + "30", + "31", + "32", + "33", + "34", + "35", + "36", + "37", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "FedoraPlatformModel", + "type": "object" + }, + "FreeBSDPlatformModel": { + "properties": { + "name": { + "const": "FreeBSD", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "10.0", + "10.1", + "10.2", + "10.3", + "10.4", + "11.0", + "11.1", + "11.2", + "11.3", + "11.4", + "12.0", + "12.1", + "12.2", + "13.0", + "8.0", + "8.1", + "8.2", + "8.3", + "8.4", + "9.0", + "9.1", + "9.2", + "9.3", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "FreeBSDPlatformModel", + "type": "object" + }, + "GalaxyInfoModel": { + "additionalProperties": false, + "allOf": [ + { + "if": { + "properties": { + "standalone": { + "const": true + } + } + }, + "then": { + "$comment": "Standalone role, so we require several fields.", + "required": [ + "author", + "description", + "license", + "min_ansible_version" + ] + } + }, + { + "if": { + "properties": { + "standalone": { + "const": false + } + } + }, + "then": { + "$comment": 
"Collection roles do not use most galaxy fields.", + "not": { + "required": [ + "cloud_platforms", + "galaxy_tags", + "min_ansible_version", + "namespace", + "platforms", + "role_name" + ] + }, + "required": ["description"] + } + } + ], + "else": { + "$comment": "If standalone is false, then we have a collection role and only description is required", + "required": ["description"] + }, + "properties": { + "author": { + "title": "Author", + "type": "string" + }, + "company": { + "title": "Company", + "type": "string" + }, + "description": { + "title": "Description", + "type": "string" + }, + "galaxy_tags": { + "items": { + "type": "string" + }, + "markdownDescription": "See https://galaxy.ansible.com/docs/contributing/creating_role.html", + "title": "Galaxy Tags", + "type": "array" + }, + "github_branch": { + "markdownDescription": "Optionally specify the branch Galaxy will use when accessing the GitHub repo for this role", + "title": "GitHub Branch", + "type": "string" + }, + "issue_tracker_url": { + "title": "Issue Tracker Url", + "type": "string" + }, + "license": { + "title": "License", + "type": "string" + }, + "min_ansible_container_version": { + "title": "Min Ansible Container Version", + "type": "string" + }, + "min_ansible_version": { + "title": "Min Ansible Version", + "type": "string" + }, + "namespace": { + "markdownDescription": "Used by molecule and ansible-lint to compute FQRN for roles outside collections", + "minLength": 2, + "pattern": "^[a-z][a-z0-9_]+$", + "title": "Namespace Name", + "type": "string" + }, + "platforms": { + "$ref": "#/$defs/platforms" + }, + "role_name": { + "minLength": 2, + "pattern": "^[a-z][a-z0-9_]+$", + "title": "Role Name", + "type": "string" + }, + "standalone": { + "description": "Set to true for old standalone roles, or false for new collection roles.", + "title": "Standalone", + "type": "boolean" + } + }, + "title": "GalaxyInfoModel", + "type": "object" + }, + "GenericBSDPlatformModel": { + "properties": { + "name": { + "const": "GenericBSD", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "GenericBSDPlatformModel", + "type": "object" + }, + "GenericLinuxPlatformModel": { + "properties": { + "name": { + "const": "GenericLinux", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "GenericLinuxPlatformModel", + "type": "object" + }, + "GenericUNIXPlatformModel": { + "properties": { + "name": { + "const": "GenericUNIX", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "GenericUNIXPlatformModel", + "type": "object" + }, + "GentooPlatformModel": { + "properties": { + "name": { + "const": "Gentoo", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "GentooPlatformModel", + "type": "object" + }, + "HardenedBSDPlatformModel": { + "properties": { + "name": { + "const": "HardenedBSD", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["10", "11", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "HardenedBSDPlatformModel", + "type": "object" + }, + "IOSPlatformModel": { + "properties": { + "name": { 
+ "const": "IOS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "IOSPlatformModel", + "type": "object" + }, + "JunosPlatformModel": { + "properties": { + "name": { + "const": "Junos", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "JunosPlatformModel", + "type": "object" + }, + "MacOSXPlatformModel": { + "properties": { + "name": { + "const": "MacOSX", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "10.10", + "10.11", + "10.12", + "10.13", + "10.14", + "10.15", + "10.7", + "10.8", + "10.9", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "MacOSXPlatformModel", + "type": "object" + }, + "MageiaPlatformModel": { + "properties": { + "name": { + "const": "Mageia", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["7", "8", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "MageiaPlatformModel", + "type": "object" + }, + "NXOSPlatformModel": { + "properties": { + "name": { + "const": "NXOS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "NXOSPlatformModel", + "type": "object" + }, + "OpenBSDPlatformModel": { + "properties": { + "name": { + "const": "OpenBSD", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "5.6", + "5.7", + "5.8", + "5.9", + "6.0", + "6.1", + "6.2", + "6.3", + "6.4", + "6.5", + "6.6", + "6.7", + "6.8", + "6.9", + "7.0", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "OpenBSDPlatformModel", + "type": "object" + }, + "OpenWrtPlatformModel": { + "properties": { + "name": { + "const": "OpenWrt", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["17.01", "18.06", "19.07", "21.02", "22.03", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "OpenWrtPlatformModel", + "type": "object" + }, + "OracleLinuxPlatformModel": { + "properties": { + "name": { + "const": "OracleLinux", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "8.0", + "8.1", + "8.2", + "8.3", + "8.4", + "8.5", + "8.6", + "8.7", + "9.0", + "9.1", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "OracleLinuxPlatformModel", + "type": "object" + }, + "PAN-OSPlatformModel": { + "properties": { + "name": { + "const": "PAN-OS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["7.1", "8.0", "8.1", "9.0", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "PAN-OSPlatformModel", + "type": "object" + }, + "SLESPlatformModel": { + "properties": { + "name": { + "const": "SLES", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "10SP3", + "10SP4", + "11", + "11SP1", + "11SP2", + "11SP3", + "11SP4", + "12", + "12SP1", + "12SP2", + "12SP3", + "12SP4", + "12SP5", + "15", + "15SP1", + "15SP2", + "15SP3", + "15SP4", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "SLESPlatformModel", + "type": "object" + }, + 
"SmartOSPlatformModel": { + "properties": { + "name": { + "const": "SmartOS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "SmartOSPlatformModel", + "type": "object" + }, + "SolarisPlatformModel": { + "properties": { + "name": { + "const": "Solaris", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["10", "11.0", "11.1", "11.2", "11.3", "11.4", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "SolarisPlatformModel", + "type": "object" + }, + "SynologyPlatformModel": { + "properties": { + "name": { + "const": "Synology", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["6.0", "6.1", "6.2", "7.0", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "SynologyPlatformModel", + "type": "object" + }, + "TMOSPlatformModel": { + "properties": { + "name": { + "const": "TMOS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["12.1", "13.0", "13.1", "14.0", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "TMOSPlatformModel", + "type": "object" + }, + "UbuntuPlatformModel": { + "properties": { + "name": { + "const": "Ubuntu", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "artful", + "bionic", + "cosmic", + "cuttlefish", + "disco", + "eoan", + "focal", + "groovy", + "hirsute", + "impish", + "jammy", + "lucid", + "maverick", + "natty", + "oneiric", + "precise", + "quantal", + "raring", + "saucy", + "trusty", + "utopic", + "vivid", + "wily", + "xenial", + "yakkety", + "zesty", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "UbuntuPlatformModel", + "type": "object" + }, + "Void_LinuxPlatformModel": { + "properties": { + "name": { + "const": "Void Linux", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "Void LinuxPlatformModel", + "type": "object" + }, + "WindowsPlatformModel": { + "properties": { + "name": { + "const": "Windows", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "2008R2", + "2008x64", + "2008x86", + "2012", + "2012R2", + "2016", + "2019", + "2022", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "WindowsPlatformModel", + "type": "object" + }, + "aosPlatformModel": { + "properties": { + "name": { + "const": "aos", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "aosPlatformModel", + "type": "object" + }, + "collections": { + "items": { + "markdownDescription": "See [Using collections in roles](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html#using-collections-in-roles) and [collection naming conventions](https://docs.ansible.com/ansible/latest/dev_guide/developing_modules_in_groups.html#naming-conventions)", + "pattern": "^[a-z_]+\\.[a-z_]+$", + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "complex_conditional": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "eosPlatformModel": { + 
"properties": { + "name": { + "const": "eos", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "eosPlatformModel", + "type": "object" + }, + "macOSPlatformModel": { + "properties": { + "name": { + "const": "macOS", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "Big-Sur", + "Catalina", + "High-Sierra", + "Mojave", + "Monterey", + "Sierra", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "macOSPlatformModel", + "type": "object" + }, + "opensusePlatformModel": { + "properties": { + "name": { + "const": "opensuse", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": [ + "12.1", + "12.2", + "12.3", + "13.1", + "13.2", + "15.0", + "15.1", + "15.2", + "15.3", + "15.4", + "15.5", + "42.1", + "42.2", + "42.3", + "all" + ], + "type": "string" + }, + "type": "array" + } + }, + "title": "opensusePlatformModel", + "type": "object" + }, + "os10PlatformModel": { + "properties": { + "name": { + "const": "os10", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "os10PlatformModel", + "type": "object" + }, + "platforms": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/AIXPlatformModel" + }, + { + "$ref": "#/$defs/AlpinePlatformModel" + }, + { + "$ref": "#/$defs/AmazonPlatformModel" + }, + { + "$ref": "#/$defs/Amazon_Linux_2PlatformModel" + }, + { + "$ref": "#/$defs/aosPlatformModel" + }, + { + "$ref": "#/$defs/ArchLinuxPlatformModel" + }, + { + "$ref": "#/$defs/ClearLinuxPlatformModel" + }, + { + "$ref": "#/$defs/CumulusPlatformModel" + }, + { + "$ref": "#/$defs/DebianPlatformModel" + }, + { + "$ref": "#/$defs/DellOSPlatformModel" + }, + { + "$ref": "#/$defs/DevuanPlatformModel" + }, + { + "$ref": "#/$defs/DragonFlyBSDPlatformModel" + }, + { + "$ref": "#/$defs/ELPlatformModel" + }, + { + "$ref": "#/$defs/eosPlatformModel" + }, + { + "$ref": "#/$defs/FedoraPlatformModel" + }, + { + "$ref": "#/$defs/FreeBSDPlatformModel" + }, + { + "$ref": "#/$defs/GenericBSDPlatformModel" + }, + { + "$ref": "#/$defs/GenericLinuxPlatformModel" + }, + { + "$ref": "#/$defs/GenericUNIXPlatformModel" + }, + { + "$ref": "#/$defs/GentooPlatformModel" + }, + { + "$ref": "#/$defs/HardenedBSDPlatformModel" + }, + { + "$ref": "#/$defs/IOSPlatformModel" + }, + { + "$ref": "#/$defs/JunosPlatformModel" + }, + { + "$ref": "#/$defs/macOSPlatformModel" + }, + { + "$ref": "#/$defs/MacOSXPlatformModel" + }, + { + "$ref": "#/$defs/MageiaPlatformModel" + }, + { + "$ref": "#/$defs/NXOSPlatformModel" + }, + { + "$ref": "#/$defs/OpenBSDPlatformModel" + }, + { + "$ref": "#/$defs/opensusePlatformModel" + }, + { + "$ref": "#/$defs/OpenWrtPlatformModel" + }, + { + "$ref": "#/$defs/OracleLinuxPlatformModel" + }, + { + "$ref": "#/$defs/os10PlatformModel" + }, + { + "$ref": "#/$defs/PAN-OSPlatformModel" + }, + { + "$ref": "#/$defs/SLESPlatformModel" + }, + { + "$ref": "#/$defs/SmartOSPlatformModel" + }, + { + "$ref": "#/$defs/SolarisPlatformModel" + }, + { + "$ref": "#/$defs/SynologyPlatformModel" + }, + { + "$ref": "#/$defs/TMOSPlatformModel" + }, + { + "$ref": "#/$defs/UbuntuPlatformModel" + }, + { + "$ref": "#/$defs/vCenterPlatformModel" + }, + { + "$ref": "#/$defs/Void_LinuxPlatformModel" + }, + { + "$ref": "#/$defs/vSpherePlatformModel" + }, + { + "$ref": 
"#/$defs/WindowsPlatformModel" + } + ] + }, + "title": "Platforms", + "type": "array" + }, + "vCenterPlatformModel": { + "properties": { + "name": { + "const": "vCenter", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["5.5", "6.0", "6.5", "6.7", "7.0", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "vCenterPlatformModel", + "type": "object" + }, + "vSpherePlatformModel": { + "properties": { + "name": { + "const": "vSphere", + "title": "Name", + "type": "string" + }, + "versions": { + "default": "all", + "items": { + "enum": ["5.5", "6.0", "6.5", "6.7", "7.0", "all"], + "type": "string" + }, + "type": "array" + } + }, + "title": "vSpherePlatformModel", + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/meta.json", + "$schema": "http://json-schema.org/draft-07/schema", + "examples": ["meta/main.yml"], + "properties": { + "additionalProperties": false, + "allow_duplicates": { + "title": "Allow Duplicates", + "type": "boolean" + }, + "collections": { + "$ref": "#/$defs/collections" + }, + "dependencies": { + "items": { + "$ref": "#/$defs/DependencyModel" + }, + "title": "Dependencies", + "type": "array" + }, + "galaxy_info": { + "$ref": "#/$defs/GalaxyInfoModel" + } + }, + "title": "Ansible Meta Schema v1/v2", + "type": ["object", "null"] +} diff --git a/src/ansiblelint/schemas/molecule.json b/src/ansiblelint/schemas/molecule.json new file mode 100644 index 0000000..5c45a5e --- /dev/null +++ b/src/ansiblelint/schemas/molecule.json @@ -0,0 +1,561 @@ +{ + "$defs": { + "ContainerRegistryModel": { + "additionalProperties": false, + "properties": { + "url": { + "title": "Url", + "type": "string" + } + }, + "required": ["url"], + "title": "ContainerRegistryModel", + "type": "object" + }, + "MoleculeDependencyModel": { + "additionalProperties": false, + "properties": { + "command": { + "title": "Command", + "type": ["string", "null"] + }, + "enabled": { + "default": true, + "title": "Enabled", + "type": "boolean" + }, + "env": { + "title": "Env", + "type": "object" + }, + "name": { + "enum": ["galaxy", "shell"], + "title": "Name", + "type": "string" + }, + "options": { + "title": "Options", + "type": "object" + } + }, + "required": ["name"], + "title": "MoleculeDependencyModel", + "type": "object" + }, + "MoleculeDriverModel": { + "additionalProperties": false, + "properties": { + "cachier": { + "title": "Cachier", + "type": "string" + }, + "default_box": { + "title": "DefaultBox", + "type": "string" + }, + "name": { + "enum": [ + "azure", + "ec2", + "delegated", + "docker", + "containers", + "openstack", + "podman", + "vagrant", + "digitalocean", + "gce", + "libvirt", + "lxd" + ], + "title": "Name", + "type": "string" + }, + "options": { + "$ref": "#/$defs/MoleculeDriverOptionsModel" + }, + "parallel": { + "title": "Parallel", + "type": "boolean" + }, + "provider": { + "title": "Provider", + "type": "object" + }, + "provision": { + "title": "Provision", + "type": "boolean" + }, + "safe_files": { + "items": { + "type": "string" + }, + "title": "SafeFiles", + "type": "array" + }, + "ssh_connection_options": { + "items": { + "type": "string" + }, + "title": "SshConnectionOptions", + "type": "array" + } + }, + "title": "MoleculeDriverModel", + "type": "object" + }, + "MoleculeDriverOptionsModel": { + "additionalProperties": false, + "properties": { + "ansible_connection_options": { + "additionalProperties": { + "type": "string" + }, + "title": "Ansible 
Connection Options", + "type": "object" + }, + "login_cmd_template": { + "title": "Login Cmd Template", + "type": "string" + }, + "managed": { + "title": "Managed", + "type": "boolean" + } + }, + "title": "MoleculeDriverOptionsModel", + "type": "object" + }, + "MoleculePlatformModel": { + "additionalProperties": true, + "properties": { + "box": { + "title": "Box", + "type": "string" + }, + "cgroupns": { + "title": "Cgroupns", + "type": "string" + }, + "children": { + "items": { + "type": "string" + }, + "type": "array" + }, + "command": { + "title": "Command", + "type": "string" + }, + "cpus": { + "title": "Cpus", + "type": "integer" + }, + "dockerfile": { + "title": "Dockerfile", + "type": "string" + }, + "env": { + "items": { + "type": "object" + }, + "title": "Platform Environment Variables", + "type": "array" + }, + "environment": { + "additionalProperties": { + "type": "string" + }, + "title": "Environment", + "type": "object" + }, + "groups": { + "items": { + "type": "string" + }, + "title": "Groups", + "type": "array" + }, + "hostname": { + "title": "Hostname", + "type": ["string", "boolean"] + }, + "image": { + "title": "Image", + "type": ["string", "null"] + }, + "interfaces": { + "title": "Interfaces", + "type": "array" + }, + "memory": { + "title": "Memory", + "type": "integer" + }, + "name": { + "title": "Name", + "type": "string" + }, + "network_mode": { + "anyOf": [ + { + "enum": ["bridge", "host", "none"], + "type": "string" + }, + { + "pattern": "^service:[a-zA-Z0-9:_.\\\\-]+$", + "type": "string" + }, + { + "pattern": "^container:[a-zA-Z0-9][a-zA-Z0-9_.-]+$", + "type": "string" + } + ], + "title": "Network Mode" + }, + "networks": { + "items": { + "$ref": "#/$defs/platform-network" + }, + "markdownDescription": "Used by docker and podman drivers.", + "title": "Networks", + "type": "array" + }, + "pkg_extras": { + "title": "Pkg Extras", + "type": "string" + }, + "pre_build_image": { + "title": "Pre Build Image", + "type": "boolean" + }, + "privileged": { + "title": "Privileged", + "type": "boolean" + }, + "provider_options": { + "title": "Provider options", + "type": "object" + }, + "provider_raw_config_args": { + "items": { + "type": "string" + }, + "title": "Provider Raw Config Args", + "type": "array" + }, + "registry": { + "$ref": "#/$defs/ContainerRegistryModel" + }, + "tmpfs": { + "items": { + "type": "string" + }, + "title": "Tmpfs", + "type": "array" + }, + "ulimits": { + "items": { + "type": "string" + }, + "title": "Ulimits", + "type": "array" + }, + "volumes": { + "items": { + "type": "string" + }, + "title": "Volumes", + "type": "array" + } + }, + "required": ["name"], + "title": "MoleculePlatformModel", + "type": "object" + }, + "MoleculeScenarioModel": { + "additionalProperties": false, + "properties": { + "check_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "cleanup_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "converge_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "create_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "dependency_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "destroy_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "idempotence_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "lint_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "name": { + "title": "Name", + "type": "string" + }, + "prepare_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "side_effect_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "syntax_sequence": { + "$ref": 
"#/$defs/ScenarioSequence" + }, + "test_sequence": { + "$ref": "#/$defs/ScenarioSequence" + }, + "verify_sequence": { + "$ref": "#/$defs/ScenarioSequence" + } + }, + "title": "MoleculeScenarioModel", + "type": "object" + }, + "ProvisionerConfigOptionsDefaultsModel": { + "additionalProperties": true, + "properties": { + "ansible_managed": { + "default": "Ansible managed: Do NOT edit this file manually!", + "title": "Ansible Managed", + "type": "string" + }, + "display_failed_stderr": { + "default": true, + "title": "Display Failed Stderr", + "type": "boolean" + }, + "fact_caching": { + "title": "Fact Caching", + "type": "string" + }, + "fact_caching_connection": { + "title": "Fact Caching Connection", + "type": "string" + }, + "forks": { + "default": 50, + "title": "Forks", + "type": "integer" + }, + "host_key_checking": { + "default": false, + "title": "Host Key Checking", + "type": "boolean" + }, + "interpreter_python": { + "default": "auto_silent", + "description": "See https://docs.ansible.com/ansible/devel/reference_appendices/interpreter_discovery.html", + "title": "Interpreter Python", + "type": "string" + }, + "nocows": { + "default": 1, + "title": "Nocows", + "type": "integer" + }, + "retry_files_enabled": { + "default": false, + "title": "Retry Files Enabled", + "type": "boolean" + } + }, + "title": "ProvisionerConfigOptionsDefaultsModel", + "type": "object" + }, + "ProvisionerConfigOptionsModel": { + "additionalProperties": true, + "properties": { + "defaults": { + "$ref": "#/$defs/ProvisionerConfigOptionsDefaultsModel" + }, + "ssh_connection": { + "$ref": "#/$defs/ProvisionerConfigOptionsSshConnectionModel" + } + }, + "title": "ProvisionerConfigOptionsModel", + "type": "object" + }, + "ProvisionerConfigOptionsSshConnectionModel": { + "additionalProperties": false, + "properties": { + "control_path": { + "default": "%(directory)s/%%h-%%p-%%r", + "title": "Control Path", + "type": "string" + }, + "scp_if_ssh": { + "default": true, + "title": "Scp If Ssh", + "type": "boolean" + } + }, + "title": "ProvisionerConfigOptionsSshConnectionModel", + "type": "object" + }, + "ProvisionerModel": { + "additionalProperties": true, + "properties": { + "config_options": { + "$ref": "#/$defs/ProvisionerConfigOptionsModel" + }, + "env": { + "title": "Env", + "type": "object" + }, + "inventory": { + "title": "Inventory", + "type": "object" + }, + "log": { + "title": "Log", + "type": "boolean" + }, + "name": { + "enum": ["ansible"], + "title": "Name", + "type": "string" + }, + "playbooks": { + "title": "Playbooks", + "type": "object" + } + }, + "title": "ProvisionerModel", + "type": "object" + }, + "ScenarioSequence": { + "additionalProperties": false, + "items": { + "enum": [ + "check", + "cleanup", + "converge", + "create", + "dependency", + "destroy", + "idempotence", + "lint", + "prepare", + "side_effect", + "syntax", + "test", + "verify" + ], + "type": "string" + }, + "title": "ScenarioSequence", + "type": "array" + }, + "VerifierModel": { + "additionalProperties": false, + "properties": { + "additional_files_or_dirs": { + "items": { + "type": "string" + }, + "title": "AdditionalFilesOrDirs", + "type": "array" + }, + "enabled": { + "title": "Enabled", + "type": "boolean" + }, + "env": { + "title": "Env", + "type": "object" + }, + "name": { + "default": "ansible", + "enum": ["ansible", "goss", "inspec", "testinfra"], + "title": "Name", + "type": "string" + }, + "options": { + "title": "Options", + "type": "object" + } + }, + "title": "VerifierModel", + "type": "object" + }, + "platform-network": 
{ + "properties": { + "aliases": { + "items": { + "type": "string" + }, + "type": "array" + }, + "ipv4_address": { + "type": "string" + }, + "name": { + "type": "string" + } + }, + "required": ["name"], + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansible-lint/schemas/molecule.json", + "$schema": "http://json-schema.org/draft-07/schema", + "additionalProperties": false, + "examples": ["molecule/*/molecule.yml"], + "properties": { + "dependency": { + "$ref": "#/$defs/MoleculeDependencyModel" + }, + "driver": { + "$ref": "#/$defs/MoleculeDriverModel" + }, + "lint": { + "title": "Lint", + "type": "string" + }, + "log": { + "default": true, + "title": "Log", + "type": "boolean" + }, + "platforms": { + "items": { + "$ref": "#/$defs/MoleculePlatformModel" + }, + "title": "Platforms", + "type": "array" + }, + "prerun": { + "title": "Prerun", + "type": "boolean" + }, + "provisioner": { + "$ref": "#/$defs/ProvisionerModel" + }, + "role_name_check": { + "enum": [0, 1, 2], + "title": "RoleNameCheck", + "type": "integer" + }, + "scenario": { + "$ref": "#/$defs/MoleculeScenarioModel" + }, + "verifier": { + "$ref": "#/$defs/VerifierModel" + } + }, + "required": ["driver", "platforms"], + "title": "Molecule Scenario Schema", + "type": "object" +} diff --git a/src/ansiblelint/schemas/playbook.json b/src/ansiblelint/schemas/playbook.json new file mode 100644 index 0000000..2ed4472 --- /dev/null +++ b/src/ansiblelint/schemas/playbook.json @@ -0,0 +1,1221 @@ +{ + "$comment": "Generated from ansible.json, do not edit.", + "$defs": { + "ansible.builtin.import_playbook": { + "additionalProperties": false, + "oneOf": [ + { + "not": { + "required": [ + "import_playbook" + ] + }, + "required": [ + "ansible.builtin.import_playbook" + ] + }, + { + "not": { + "required": [ + "ansible.builtin.import_playbook" + ] + }, + "required": [ + "import_playbook" + ] + } + ], + "patternProperties": { + "^(ansible\\.builtin\\.)?import_playbook$": { + "markdownDescription": "* Includes a file with a list of plays to be executed.\n * Files with a list of plays can only be included at the top level.\n * You cannot use this action inside a play.\n\nSee [import_playbook](https://docs.ansible.com/ansible/latest/collections/ansible/builtin/import_playbook_module.html)", + "title": "Import Playbook", + "type": "string" + }, + "name": { + "title": "Name", + "type": "string" + }, + "tags": { + "$ref": "#/$defs/tags" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "type": "object" + }, + "become_method": { + "markdownDescription": "See [become](https://docs.ansible.com/ansible/latest/user_guide/become.html)", + "oneOf": [ + { + "enum": [ + "sudo", + "su", + "pbrun", + "pfexec", + "runas", + "dzdo", + "ksu", + "doas", + "machinectl" + ], + "type": "string" + }, + { + "$ref": "#/$defs/full-jinja" + } + ], + "title": "Become Method" + }, + "block": { + "properties": { + "always": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "title": "Always", + "type": "array" + }, + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { 
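The Molecule scenario schema above only hard-requires `driver` and `platforms`, and each platform needs at least a `name`. A minimal sketch of an accepted scenario follows; the inline schema mirrors just those requirements (driver names trimmed to a subset) and the image value is purely illustrative.

"""Smallest molecule.yml-style document accepted by the scenario schema above.

Sketch only: the inline schema mirrors the top-level `required` keys and the
per-platform `name` requirement, not the full molecule.json.
"""
import jsonschema

MOLECULE_SKETCH = {
    "type": "object",
    "required": ["driver", "platforms"],
    "properties": {
        "driver": {
            "type": "object",
            "properties": {"name": {"enum": ["docker", "podman", "delegated"]}},
        },
        "platforms": {
            "type": "array",
            "items": {"type": "object", "required": ["name"]},
        },
    },
}

scenario = {
    "driver": {"name": "podman"},
    "platforms": [{"name": "instance", "image": "quay.io/centos/centos:stream9"}],
}

jsonschema.validate(instance=scenario, schema=MOLECULE_SKETCH)
print("molecule scenario accepted")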
+ "title": "Become User", + "type": "string" + }, + "block": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "markdownDescription": "Blocks create logical groups of tasks. Blocks also offer ways to handle task errors, similar to exception handling in many programming languages. See [blocks](https://docs.ansible.com/ansible/latest/user_guide/playbooks_blocks.html)", + "title": "Block", + "type": "array" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delegate_facts": { + "title": "Delegate Facts", + "type": "boolean" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "port": { + "$ref": "#/$defs/templated-integer" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "rescue": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "title": "Rescue", + "type": "array" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": [ + "block" + ], + "type": "object" + }, + "complex_conditional": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "environment": { + "anyOf": [ + { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + { + "$ref": "#/$defs/full-jinja" + } + ], + "title": "Environment" + }, + "full-jinja": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$", + "type": "string" + }, + "ignore_errors": { + "$ref": "#/$defs/templated-boolean", + "markdownDescription": "See [ignore_errors](https://docs.ansible.com/ansible/latest/user_guide/playbooks_error_handling.html#ignoring-failed-commands)", + "title": "Ignore Errors" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean", + "markdownDescription": "Use for protecting sensitive data. 
See [no_log](https://docs.ansible.com/ansible/latest/reference_appendices/logging.html)", + "title": "no_log" + }, + "play": { + "additionalProperties": false, + "allOf": [ + { + "not": { + "required": [ + "ansible.builtin.import_playbook" + ] + } + }, + { + "not": { + "required": [ + "import_playbook" + ] + } + } + ], + "properties": { + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "fact_path": { + "title": "Fact Path", + "type": "string" + }, + "force_handlers": { + "title": "Force Handlers", + "type": "boolean" + }, + "gather_facts": { + "title": "Gather Facts", + "type": "boolean" + }, + "gather_subset": { + "items": { + "anyOf": [ + { + "enum": [ + "all", + "min", + "all_ipv4_addresses", + "all_ipv6_addresses", + "apparmor", + "architecture", + "caps", + "chroot,cmdline", + "date_time", + "default_ipv4", + "default_ipv6", + "devices", + "distribution", + "distribution_major_version", + "distribution_release", + "distribution_version", + "dns", + "effective_group_ids", + "effective_user_id", + "env", + "facter", + "fips", + "hardware", + "interfaces", + "is_chroot", + "iscsi", + "kernel", + "local", + "lsb", + "machine", + "machine_id", + "mounts", + "network", + "ohai", + "os_family", + "pkg_mgr", + "platform", + "processor", + "processor_cores", + "processor_count", + "python", + "python_version", + "real_user_id", + "selinux", + "service_mgr", + "ssh_host_key_dsa_public", + "ssh_host_key_ecdsa_public", + "ssh_host_key_ed25519_public", + "ssh_host_key_rsa_public", + "ssh_host_pub_keys", + "ssh_pub_keys", + "system", + "system_capabilities", + "system_capabilities_enforced", + "user", + "user_dir", + "user_gecos", + "user_gid", + "user_id", + "user_shell", + "user_uid", + "virtual", + "virtualization_role", + "virtualization_type" + ], + "type": "string" + }, + { + "enum": [ + "!all", + "!min", + "!all_ipv4_addresses", + "!all_ipv6_addresses", + "!apparmor", + "!architecture", + "!caps", + "!chroot,cmdline", + "!date_time", + "!default_ipv4", + "!default_ipv6", + "!devices", + "!distribution", + "!distribution_major_version", + "!distribution_release", + "!distribution_version", + "!dns", + "!effective_group_ids", + "!effective_user_id", + "!env", + "!facter", + "!fips", + "!hardware", + "!interfaces", + "!is_chroot", + "!iscsi", + "!kernel", + "!local", + "!lsb", + "!machine", + "!machine_id", + "!mounts", + "!network", + "!ohai", + "!os_family", + "!pkg_mgr", + "!platform", + "!processor", + "!processor_cores", + "!processor_count", + "!python", + "!python_version", + "!real_user_id", + "!selinux", + "!service_mgr", + "!ssh_host_key_dsa_public", + "!ssh_host_key_ecdsa_public", + "!ssh_host_key_ed25519_public", + "!ssh_host_key_rsa_public", + "!ssh_host_pub_keys", + 
"!ssh_pub_keys", + "!system", + "!system_capabilities", + "!system_capabilities_enforced", + "!user", + "!user_dir", + "!user_gecos", + "!user_gid", + "!user_id", + "!user_shell", + "!user_uid", + "!virtual", + "!virtualization_role", + "!virtualization_type" + ], + "type": "string" + } + ] + }, + "title": "Gather Subset", + "type": "array" + }, + "gather_timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Gather Timeout" + }, + "handlers": { + "$ref": "#/$defs/tasks" + }, + "hosts": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Hosts" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "max_fail_percentage": { + "title": "Max Fail Percentage", + "type": "number" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "order": { + "enum": [ + "default", + "sorted", + "reverse_sorted", + "reverse_inventory", + "shuffle" + ], + "title": "Order", + "type": "string" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "post_tasks": { + "$ref": "#/$defs/tasks" + }, + "pre_tasks": { + "$ref": "#/$defs/tasks" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "roles": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/play-role" + }, + { + "type": "string" + } + ] + }, + "markdownDescription": "Roles let you automatically load related vars, files, tasks, handlers, and other Ansible artifacts based on a known file structure. After you group your content in roles, you can easily reuse them and share them with other users.\n See [roles](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#roles)", + "title": "Roles", + "type": "array" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "serial": { + "anyOf": [ + { + "$ref": "#/$defs/templated-integer-or-percent" + }, + { + "items": { + "$ref": "#/$defs/templated-integer-or-percent" + }, + "type": "array" + } + ], + "markdownDescription": "Integer, percentage or list of those. 
See [Setting the batch size with serial](https://docs.ansible.com/ansible/latest/user_guide/playbooks_strategies.html#setting-the-batch-size-with-serial)", + "title": "Batch size" + }, + "strategy": { + "title": "Strategy", + "type": "string" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "tasks": { + "$ref": "#/$defs/tasks" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "user": { + "title": "Remote User", + "type": "string" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "vars_files": { + "items": { + "type": "string" + }, + "title": "Vars Files", + "type": [ + "array", + "string", + "null" + ] + }, + "vars_prompt": { + "items": { + "$ref": "#/$defs/vars_prompt" + }, + "markdownDescription": "See [vars_prompt](https://docs.ansible.com/ansible/latest/user_guide/playbooks_prompts.html)", + "title": "vars_prompt", + "type": "array" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": [ + "hosts" + ], + "title": "play", + "type": "object" + }, + "play-role": { + "markdownDescription": "See [roles](https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#roles)", + "properties": { + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "role": { + "title": "Role", + "type": "string" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": [ + "role" + ], + "title": "play-role", + "type": "object" + }, + "tags": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Tags" + }, + "task": { + "additionalProperties": true, + "allOf": [ + { + "not": { + "required": [ + "hosts" + ] + } + }, + { + 
"not": { + "required": [ + "tasks" + ] + } + }, + { + "not": { + "required": [ + "import_playbook" + ] + } + }, + { + "not": { + "required": [ + "block" + ] + } + } + ], + "properties": { + "action": { + "title": "Action", + "type": "string" + }, + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "args": { + "$ref": "#/$defs/templated-object", + "title": "Args" + }, + "async": { + "$ref": "#/$defs/templated-integer", + "title": "Async" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "changed_when": { + "$ref": "#/$defs/complex_conditional", + "markdownDescription": "See [changed_when](https://docs.ansible.com/ansible/latest/user_guide/playbooks_error_handling.html#defining-changed)", + "title": "Changed When" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delay": { + "$ref": "#/$defs/templated-integer", + "title": "Delay" + }, + "delegate_facts": { + "title": "Delegate Facts", + "type": "boolean" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "failed_when": { + "$ref": "#/$defs/complex_conditional", + "title": "Failed When" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "listen": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "markdownDescription": "Applies only to handlers. 
See [listen](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_handlers.html)", + "title": "Listen" + }, + "local_action": { + "title": "Local Action", + "type": [ + "string", + "object" + ] + }, + "loop": { + "title": "Loop", + "type": [ + "string", + "array" + ] + }, + "loop_control": { + "title": "Loop Control" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/no_log" + }, + "notify": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Notify" + }, + "poll": { + "$ref": "#/$defs/templated-integer", + "title": "Poll" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "register": { + "title": "Register", + "type": "string" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "retries": { + "$ref": "#/$defs/templated-integer", + "title": "Retries" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "until": { + "$ref": "#/$defs/complex_conditional", + "title": "Until" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + }, + "with_dict": { + "title": "With Dict" + }, + "with_fileglob": { + "title": "With Fileglob" + }, + "with_filetree": { + "title": "With Filetree" + }, + "with_first_found": { + "title": "With First Found" + }, + "with_indexed_items": { + "title": "With Indexed Items" + }, + "with_ini": { + "title": "With Ini" + }, + "with_inventory_hostnames": { + "title": "With Inventory Hostnames" + }, + "with_items": { + "anyOf": [ + { + "$ref": "#/$defs/full-jinja" + }, + { + "type": "array" + } + ], + "markdownDescription": "See [loops](https://docs.ansible.com/ansible/latest/user_guide/playbooks_loops.html#loops)", + "title": "With Items" + }, + "with_lines": { + "title": "With Lines" + }, + "with_random_choice": { + "title": "With Random Choice" + }, + "with_sequence": { + "title": "With Sequence" + }, + "with_subelements": { + "title": "With Subelements" + }, + "with_together": { + "title": "With Together" + } + }, + "title": "task", + "type": "object" + }, + "tasks": { + "$schema": "http://json-schema.org/draft-07/schema", + "examples": [ + "tasks/*.yml", + "handlers/*.yml" + ], + "items": { + "anyOf": [ + { + "$ref": "#/$defs/block" + }, + { + "$ref": "#/$defs/task" + } + ] + }, + "title": "Ansible Tasks Schema", + "type": [ + "array", + "null" + ] + }, + "templated-boolean": { + "oneOf": [ + { + "type": "boolean" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-integer": { + "oneOf": [ + { + "type": "integer" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-integer-or-percent": { + "oneOf": [ + { + "type": "integer" + }, + { + "pattern": "^\\d+\\.?\\d*%?$", + "type": "string" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-object": { + "oneOf": [ + { + "type": "object" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "vars_prompt": { + "additionalProperties": false, + "properties": { + "confirm": { + "title": "Confirm", + "type": "boolean" + }, + "default": { + "title": 
"Default", + "type": "string" + }, + "encrypt": { + "enum": [ + "des_crypt", + "bsdi_crypt", + "bigcrypt", + "crypt16", + "md5_crypt", + "bcrypt", + "sha1_crypt", + "sun_md5_crypt", + "sha256_crypt", + "sha512_crypt", + "apr_md5_crypt", + "phpass", + "pbkdf2_digest", + "cta_pbkdf2_sha1", + "dlitz_pbkdf2_sha1", + "scram", + "bsd_nthash" + ], + "title": "Encrypt", + "type": "string" + }, + "name": { + "title": "Name", + "type": "string" + }, + "private": { + "default": true, + "title": "Private", + "type": "boolean" + }, + "prompt": { + "title": "Prompt", + "type": "string" + }, + "salt_size": { + "default": 8, + "title": "Salt Size", + "type": "integer" + }, + "unsafe": { + "default": false, + "markdownDescription": "See [unsafe](https://docs.ansible.com/ansible/latest/user_guide/playbooks_prompts.html#allowing-special-characters-in-vars-prompt-values)", + "title": "Unsafe", + "type": "boolean" + } + }, + "required": [ + "name", + "prompt" + ], + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/playbook.json", + "$schema": "http://json-schema.org/draft-07/schema", + "examples": [ + "playbooks/*.yml", + "playbooks/*.yaml" + ], + "items": { + "oneOf": [ + { + "$ref": "#/$defs/ansible.builtin.import_playbook" + }, + { + "$ref": "#/$defs/play" + } + ] + }, + "title": "Ansible Playbook", + "type": "array" +} diff --git a/src/ansiblelint/schemas/requirements.json b/src/ansiblelint/schemas/requirements.json new file mode 100644 index 0000000..73c8a85 --- /dev/null +++ b/src/ansiblelint/schemas/requirements.json @@ -0,0 +1,135 @@ +{ + "$defs": { + "CollectionModel": { + "additionalProperties": false, + "properties": { + "name": { + "title": "Name", + "type": "string" + }, + "source": { + "title": "Source", + "type": "string" + }, + "type": { + "enum": ["galaxy", "url", "file", "git", "dir", "subdirs"], + "title": "Type", + "type": "string" + }, + "version": { + "title": "Version", + "type": "string" + } + }, + "title": "CollectionModel", + "type": "object" + }, + "CollectionStringModel": { + "title": "CollectionStringModel", + "type": "string" + }, + "IncludeModel": { + "properties": { + "include": { + "title": "Include", + "type": "string" + } + }, + "required": ["include"], + "title": "IncludeModel", + "type": "object" + }, + "RequirementsV2Model": { + "additionalProperties": false, + "anyOf": [ + { + "required": ["collections"] + }, + { + "required": ["roles"] + } + ], + "properties": { + "collections": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/CollectionModel" + }, + { + "$ref": "#/$defs/CollectionStringModel" + } + ] + }, + "title": "Collections", + "type": "array" + }, + "roles": { + "items": { + "$ref": "#/$defs/RoleModel" + }, + "title": "Roles", + "type": "array" + } + }, + "title": "Requirements v2", + "type": "object" + }, + "RoleModel": { + "additionalProperties": false, + "properties": { + "name": { + "title": "Name", + "type": "string" + }, + "scm": { + "anyOf": [ + { + "enum": ["git"], + "type": "string" + }, + { + "enum": ["hg"], + "type": "string" + } + ], + "default": "git", + "title": "Scm" + }, + "src": { + "title": "Src", + "type": "string" + }, + "version": { + "default": "master", + "title": "Version", + "type": "string" + } + }, + "title": "Role", + "type": "object" + } + }, + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/ansible-requirements.json", + "$schema": "http://json-schema.org/draft-07/schema", + "anyOf": [ + { + "items": { + "anyOf": [ + { + "$ref": 
"#/$defs/RoleModel" + }, + { + "$ref": "#/$defs/IncludeModel" + } + ] + }, + "type": "array" + }, + { + "$ref": "#/$defs/RequirementsV2Model" + } + ], + "examples": ["requirements.yml"], + "title": "Ansible Requirements Schema" +} diff --git a/src/ansiblelint/schemas/tasks.json b/src/ansiblelint/schemas/tasks.json new file mode 100644 index 0000000..51f5fb3 --- /dev/null +++ b/src/ansiblelint/schemas/tasks.json @@ -0,0 +1,574 @@ +{ + "$comment": "Generated from ansible.json, do not edit.", + "$defs": { + "become_method": { + "markdownDescription": "See [become](https://docs.ansible.com/ansible/latest/user_guide/become.html)", + "oneOf": [ + { + "enum": [ + "sudo", + "su", + "pbrun", + "pfexec", + "runas", + "dzdo", + "ksu", + "doas", + "machinectl" + ], + "type": "string" + }, + { + "$ref": "#/$defs/full-jinja" + } + ], + "title": "Become Method" + }, + "block": { + "properties": { + "always": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "title": "Always", + "type": "array" + }, + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "block": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "markdownDescription": "Blocks create logical groups of tasks. Blocks also offer ways to handle task errors, similar to exception handling in many programming languages. See [blocks](https://docs.ansible.com/ansible/latest/user_guide/playbooks_blocks.html)", + "title": "Block", + "type": "array" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delegate_facts": { + "title": "Delegate Facts", + "type": "boolean" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean" + }, + "port": { + "$ref": "#/$defs/templated-integer" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "rescue": { + "items": { + "anyOf": [ + { + "$ref": "#/$defs/task" + }, + { + "$ref": "#/$defs/block" + } + ] + }, + "title": "Rescue", + "type": "array" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + } + }, + "required": [ + 
"block" + ], + "type": "object" + }, + "complex_conditional": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ] + }, + "environment": { + "anyOf": [ + { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + { + "$ref": "#/$defs/full-jinja" + } + ], + "title": "Environment" + }, + "full-jinja": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$", + "type": "string" + }, + "ignore_errors": { + "$ref": "#/$defs/templated-boolean", + "markdownDescription": "See [ignore_errors](https://docs.ansible.com/ansible/latest/user_guide/playbooks_error_handling.html#ignoring-failed-commands)", + "title": "Ignore Errors" + }, + "no_log": { + "$ref": "#/$defs/templated-boolean", + "markdownDescription": "Use for protecting sensitive data. See [no_log](https://docs.ansible.com/ansible/latest/reference_appendices/logging.html)", + "title": "no_log" + }, + "tags": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Tags" + }, + "task": { + "additionalProperties": true, + "allOf": [ + { + "not": { + "required": [ + "hosts" + ] + } + }, + { + "not": { + "required": [ + "tasks" + ] + } + }, + { + "not": { + "required": [ + "import_playbook" + ] + } + }, + { + "not": { + "required": [ + "block" + ] + } + } + ], + "properties": { + "action": { + "title": "Action", + "type": "string" + }, + "any_errors_fatal": { + "title": "Any Errors Fatal", + "type": "boolean" + }, + "args": { + "$ref": "#/$defs/templated-object", + "title": "Args" + }, + "async": { + "$ref": "#/$defs/templated-integer", + "title": "Async" + }, + "become": { + "$ref": "#/$defs/templated-boolean", + "title": "Become" + }, + "become_exe": { + "title": "Become Exe", + "type": "string" + }, + "become_flags": { + "title": "Become Flags", + "type": "string" + }, + "become_method": { + "$ref": "#/$defs/become_method" + }, + "become_user": { + "title": "Become User", + "type": "string" + }, + "changed_when": { + "$ref": "#/$defs/complex_conditional", + "markdownDescription": "See [changed_when](https://docs.ansible.com/ansible/latest/user_guide/playbooks_error_handling.html#defining-changed)", + "title": "Changed When" + }, + "check_mode": { + "$ref": "#/$defs/complex_conditional", + "title": "Check Mode" + }, + "collections": { + "items": { + "type": "string" + }, + "title": "Collections", + "type": "array" + }, + "connection": { + "title": "Connection", + "type": "string" + }, + "debugger": { + "title": "Debugger", + "type": "string" + }, + "delay": { + "$ref": "#/$defs/templated-integer", + "title": "Delay" + }, + "delegate_facts": { + "title": "Delegate Facts", + "type": "boolean" + }, + "delegate_to": { + "title": "Delegate To", + "type": "string" + }, + "diff": { + "$ref": "#/$defs/templated-boolean", + "title": "Diff" + }, + "environment": { + "$ref": "#/$defs/environment" + }, + "failed_when": { + "$ref": "#/$defs/complex_conditional", + "title": "Failed When" + }, + "ignore_errors": { + "$ref": "#/$defs/ignore_errors" + }, + "ignore_unreachable": { + "title": "Ignore Unreachable", + "type": "boolean" + }, + "listen": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "markdownDescription": "Applies only to handlers. 
See [listen](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_handlers.html)", + "title": "Listen" + }, + "local_action": { + "title": "Local Action", + "type": [ + "string", + "object" + ] + }, + "loop": { + "title": "Loop", + "type": [ + "string", + "array" + ] + }, + "loop_control": { + "title": "Loop Control" + }, + "module_defaults": { + "title": "Module Defaults" + }, + "name": { + "title": "Name", + "type": "string" + }, + "no_log": { + "$ref": "#/$defs/no_log" + }, + "notify": { + "anyOf": [ + { + "type": "string" + }, + { + "items": { + "type": "string" + }, + "type": "array" + } + ], + "title": "Notify" + }, + "poll": { + "$ref": "#/$defs/templated-integer", + "title": "Poll" + }, + "port": { + "$ref": "#/$defs/templated-integer", + "title": "Port" + }, + "register": { + "title": "Register", + "type": "string" + }, + "remote_user": { + "title": "Remote User", + "type": "string" + }, + "retries": { + "$ref": "#/$defs/templated-integer", + "title": "Retries" + }, + "run_once": { + "$ref": "#/$defs/templated-boolean", + "title": "Run Once" + }, + "tags": { + "$ref": "#/$defs/tags", + "title": "Tags" + }, + "throttle": { + "$ref": "#/$defs/templated-integer", + "title": "Throttle" + }, + "timeout": { + "$ref": "#/$defs/templated-integer", + "title": "Timeout" + }, + "until": { + "$ref": "#/$defs/complex_conditional", + "title": "Until" + }, + "vars": { + "title": "Vars", + "type": "object" + }, + "when": { + "$ref": "#/$defs/complex_conditional", + "title": "When" + }, + "with_dict": { + "title": "With Dict" + }, + "with_fileglob": { + "title": "With Fileglob" + }, + "with_filetree": { + "title": "With Filetree" + }, + "with_first_found": { + "title": "With First Found" + }, + "with_indexed_items": { + "title": "With Indexed Items" + }, + "with_ini": { + "title": "With Ini" + }, + "with_inventory_hostnames": { + "title": "With Inventory Hostnames" + }, + "with_items": { + "anyOf": [ + { + "$ref": "#/$defs/full-jinja" + }, + { + "type": "array" + } + ], + "markdownDescription": "See [loops](https://docs.ansible.com/ansible/latest/user_guide/playbooks_loops.html#loops)", + "title": "With Items" + }, + "with_lines": { + "title": "With Lines" + }, + "with_random_choice": { + "title": "With Random Choice" + }, + "with_sequence": { + "title": "With Sequence" + }, + "with_subelements": { + "title": "With Subelements" + }, + "with_together": { + "title": "With Together" + } + }, + "title": "task", + "type": "object" + }, + "templated-boolean": { + "oneOf": [ + { + "type": "boolean" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-integer": { + "oneOf": [ + { + "type": "integer" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + }, + "templated-object": { + "oneOf": [ + { + "type": "object" + }, + { + "$ref": "#/$defs/full-jinja", + "type": "string" + } + ] + } + }, + "$id": "https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/tasks.json", + "$schema": "http://json-schema.org/draft-07/schema", + "examples": [ + "tasks/*.yml", + "handlers/*.yml" + ], + "items": { + "anyOf": [ + { + "$ref": "#/$defs/block" + }, + { + "$ref": "#/$defs/task" + } + ] + }, + "title": "Ansible Tasks Schema", + "type": [ + "array", + "null" + ] +} diff --git a/src/ansiblelint/schemas/vars.json b/src/ansiblelint/schemas/vars.json new file mode 100644 index 0000000..28bf7ba --- /dev/null +++ b/src/ansiblelint/schemas/vars.json @@ -0,0 +1,29 @@ +{ + "$id": 
"https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/ansible-vars.json", + "$schema": "http://json-schema.org/draft-07/schema", + "anyOf": [ + { + "additionalProperties": false, + "patternProperties": { + "^(?!(False|None|True|and|any_errors_fatal|as|assert|async|await|become|become_exe|become_flags|become_method|become_user|break|check_mode|class|collections|connection|continue|debugger|def|del|diff|elif|else|environment|except|fact_path|finally|for|force_handlers|from|gather_facts|gather_subset|gather_timeout|global|handlers|hosts|if|ignore_errors|ignore_unreachable|import|in|is|lambda|max_fail_percentage|module_defaults|name|no_log|nonlocal|not|or|order|pass|port|post_tasks|pre_tasks|raise|remote_user|return|roles|run_once|serial|strategy|tags|tasks|throttle|timeout|try|vars|vars_files|vars_prompt|while|with|yield)$)[a-zA-Z_][\\w]*$": {} + }, + "type": "object" + }, + { + "pattern": "^\\$ANSIBLE_VAULT;", + "type": "string" + }, + { + "type": "null" + } + ], + "examples": [ + "playbooks/vars/*.yml", + "vars/*.yml", + "defaults/*.yml", + "host_vars/*.yml", + "group_vars/*.yml" + ], + "markdownDescription": "See [Using Variables](https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html)", + "title": "Ansible Vars Schema" +} diff --git a/src/ansiblelint/skip_utils.py b/src/ansiblelint/skip_utils.py new file mode 100644 index 0000000..be12171 --- /dev/null +++ b/src/ansiblelint/skip_utils.py @@ -0,0 +1,293 @@ +# (c) 2019–2020, Ansible by Red Hat +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+ +"""Utils related to inline skipping of rules.""" +from __future__ import annotations + +import collections.abc +import logging +import re +from functools import lru_cache +from itertools import product +from typing import TYPE_CHECKING, Any, Generator, Sequence + +# Module 'ruamel.yaml' does not explicitly export attribute 'YAML'; implicit reexport disabled +from ruamel.yaml import YAML +from ruamel.yaml.composer import ComposerError +from ruamel.yaml.scanner import ScannerError +from ruamel.yaml.tokens import CommentToken + +from ansiblelint.config import used_old_tags +from ansiblelint.constants import ( + NESTED_TASK_KEYS, + PLAYBOOK_TASK_KEYWORDS, + RENAMED_TAGS, + SKIPPED_RULES_KEY, +) +from ansiblelint.file_utils import Lintable + +if TYPE_CHECKING: + from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject + +_logger = logging.getLogger(__name__) +_found_deprecated_tags: set[str] = set() +_noqa_comment_re = re.compile(r"^# noqa(\s|:)") + +# playbook: Sequence currently expects only instances of one of the two +# classes below but we should consider avoiding this chimera. +# ruamel.yaml.comments.CommentedSeq +# ansible.parsing.yaml.objects.AnsibleSequence + + +def get_rule_skips_from_line(line: str) -> list[str]: + """Return list of rule ids skipped via comment on the line of yaml.""" + _before_noqa, _noqa_marker, noqa_text = line.partition("# noqa") + + result = [] + for v in noqa_text.lstrip(" :").split(): + if v in RENAMED_TAGS: + tag = RENAMED_TAGS[v] + if v not in _found_deprecated_tags: + _logger.warning( + "Replaced outdated tag '%s' with '%s', replace it to avoid future regressions", + v, + tag, + ) + _found_deprecated_tags.add(v) + v = tag + result.append(v) + return result + + +def append_skipped_rules( + pyyaml_data: AnsibleBaseYAMLObject, lintable: Lintable +) -> AnsibleBaseYAMLObject: + """Append 'skipped_rules' to individual tasks or single metadata block. + + For a file, uses 2nd parser (ruamel.yaml) to pull comments out of + yaml subsets, check for '# noqa' skipped rules, and append any skips to the + original parser (pyyaml) data relied on by remainder of ansible-lint. + + :param pyyaml_data: file text parsed via ansible and pyyaml. + :param file_text: raw file text. + :param file_type: type of file: tasks, handlers or meta. + :returns: original pyyaml_data altered with a 'skipped_rules' list added \ + to individual tasks, or added to the single metadata block. + """ + try: + yaml_skip = _append_skipped_rules(pyyaml_data, lintable) + except RuntimeError: + # Notify user of skip error, do not stop, do not change exit code + _logger.error("Error trying to append skipped rules", exc_info=True) + return pyyaml_data + + if not yaml_skip: + return pyyaml_data + + return yaml_skip + + +@lru_cache(maxsize=None) +def load_data(file_text: str) -> Any: + """Parse ``file_text`` as yaml and return parsed structure. 
+ + This is the main culprit for slow performance, each rule asks for loading yaml again and again + ideally the ``maxsize`` on the decorator above MUST be great or equal total number of rules + :param file_text: raw text to parse + :return: Parsed yaml + """ + yaml = YAML() + # Ruamel role is not to validate the yaml file, so we ignore duplicate keys: + yaml.allow_duplicate_keys = True + try: + return yaml.load(file_text) + except ComposerError: + # load fails on multi-documents with ComposerError exception + return yaml.load_all(file_text) + + +def _append_skipped_rules( # noqa: max-complexity: 12 + pyyaml_data: AnsibleBaseYAMLObject, lintable: Lintable +) -> AnsibleBaseYAMLObject | None: + # parse file text using 2nd parser library + try: + ruamel_data = load_data(lintable.content) + except ScannerError as exc: + _logger.debug( + "Ignored loading skipped rules from file %s due to: %s", lintable, exc + ) + # For unparsable file types, we return empty skip lists + return None + skipped_rules = _get_rule_skips_from_yaml(ruamel_data, lintable) + + if lintable.kind in [ + "yaml", + "requirements", + "vars", + "meta", + "reno", + "test-meta", + "galaxy", + ]: + # AnsibleMapping, dict + if hasattr(pyyaml_data, "get"): + pyyaml_data[SKIPPED_RULES_KEY] = skipped_rules + # AnsibleSequence, list + elif ( + not isinstance(pyyaml_data, str) + and isinstance(pyyaml_data, collections.abc.Sequence) + and skipped_rules + ): + pyyaml_data[0][SKIPPED_RULES_KEY] = skipped_rules + + return pyyaml_data + + # create list of blocks of tasks or nested tasks + if lintable.kind in ("tasks", "handlers"): + ruamel_task_blocks = ruamel_data + pyyaml_task_blocks = pyyaml_data + elif lintable.kind == "playbook": + try: + pyyaml_task_blocks = _get_task_blocks_from_playbook(pyyaml_data) + ruamel_task_blocks = _get_task_blocks_from_playbook(ruamel_data) + except (AttributeError, TypeError): + return pyyaml_data + else: + # For unsupported file types, we return empty skip lists + return None + + # get tasks from blocks of tasks + pyyaml_tasks = _get_tasks_from_blocks(pyyaml_task_blocks) + ruamel_tasks = _get_tasks_from_blocks(ruamel_task_blocks) + + # append skipped_rules for each task + for ruamel_task, pyyaml_task in zip(ruamel_tasks, pyyaml_tasks): + # ignore empty tasks + if not pyyaml_task and not ruamel_task: + continue + + # AnsibleUnicode or str + if isinstance(pyyaml_task, str): + continue + + if pyyaml_task.get("name") != ruamel_task.get("name"): + raise RuntimeError("Error in matching skip comment to a task") + pyyaml_task[SKIPPED_RULES_KEY] = _get_rule_skips_from_yaml( + ruamel_task, lintable + ) + + return pyyaml_data + + +def _get_task_blocks_from_playbook(playbook: Sequence[Any]) -> list[Any]: + """Return parts of playbook that contains tasks, and nested tasks. + + :param playbook: playbook yaml from yaml parser. + :returns: list of task dictionaries. 
+ """ + task_blocks = [] + for play, key in product(playbook, PLAYBOOK_TASK_KEYWORDS): + task_blocks.extend(play.get(key, [])) + return task_blocks + + +def _get_tasks_from_blocks(task_blocks: Sequence[Any]) -> Generator[Any, None, None]: + """Get list of tasks from list made of tasks and nested tasks.""" + if not task_blocks: + return + + def get_nested_tasks(task: Any) -> Generator[Any, None, None]: + if not task or not is_nested_task(task): + return + for k in NESTED_TASK_KEYS: + if k in task and task[k]: + if hasattr(task[k], "get"): + continue + for subtask in task[k]: + yield from get_nested_tasks(subtask) + yield subtask + + for task in task_blocks: + yield from get_nested_tasks(task) + yield task + + +def _get_rule_skips_from_yaml( # noqa: max-complexity: 12 + yaml_input: Sequence[Any], lintable: Lintable +) -> Sequence[Any]: + """Traverse yaml for comments with rule skips and return list of rules.""" + yaml_comment_obj_strings = [] + + if isinstance(yaml_input, str): + return [] + + def traverse_yaml(obj: Any) -> None: + for _, entry in obj.ca.items.items(): + for v in entry: + if isinstance(v, CommentToken): + comment_str = v.value + if _noqa_comment_re.match(comment_str): + line = v.start_mark.line + 1 # ruamel line numbers start at 0 + # column = v.start_mark.column + 1 # ruamel column numbers start at 0 + lintable.line_skips[line].update( + get_rule_skips_from_line(comment_str.strip()) + ) + yaml_comment_obj_strings.append(str(obj.ca.items)) + if isinstance(obj, dict): + for _, val in obj.items(): + if isinstance(val, (dict, list)): + traverse_yaml(val) + elif isinstance(obj, list): + for element in obj: + if isinstance(element, (dict, list)): + traverse_yaml(element) + else: + return + + if isinstance(yaml_input, (dict, list)): + traverse_yaml(yaml_input) + + rule_id_list = [] + for comment_obj_str in yaml_comment_obj_strings: + for line in comment_obj_str.split(r"\n"): + rule_id_list.extend(get_rule_skips_from_line(line)) + + return [normalize_tag(tag) for tag in rule_id_list] + + +def normalize_tag(tag: str) -> str: + """Return current name of tag.""" + if tag in RENAMED_TAGS: + used_old_tags[tag] = RENAMED_TAGS[tag] + return RENAMED_TAGS[tag] + return tag + + +def is_nested_task(task: dict[str, Any]) -> bool: + """Check if task includes block/always/rescue.""" + # Cannot really trust the input + if isinstance(task, str): + return False + + for key in NESTED_TASK_KEYS: + if task.get(key): + return True + + return False diff --git a/src/ansiblelint/stats.py b/src/ansiblelint/stats.py new file mode 100644 index 0000000..67320b8 --- /dev/null +++ b/src/ansiblelint/stats.py @@ -0,0 +1,36 @@ +"""Module hosting functionality about reporting.""" +from __future__ import annotations + +from dataclasses import dataclass, field + + +@dataclass(order=True) +class TagStats: + """Tag statistics.""" + + order: int = 0 # to be computed based on rule's profile + tag: str = "" # rule effective id (can be multiple tags per rule id) + count: int = 0 # total number of occurrences + warning: bool = False # set true if listed in warn_list + profile: str = "" + associated_tags: list[str] = field(default_factory=list) + + +class SummarizedResults: + """The statistics about an ansible-lint run.""" + + failures: int = 0 + warnings: int = 0 + fixed_failures: int = 0 + fixed_warnings: int = 0 + tag_stats: dict[str, TagStats] = {} + passed_profile: str = "" + + @property + def fixed(self) -> int: + """Get total fixed count.""" + return self.fixed_failures + self.fixed_warnings + + def sort(self) -> 
None: + """Sort tag stats by tag name.""" + self.tag_stats = dict(sorted(self.tag_stats.items(), key=lambda t: t[1])) diff --git a/src/ansiblelint/testing/__init__.py b/src/ansiblelint/testing/__init__.py new file mode 100644 index 0000000..a4ebcc8 --- /dev/null +++ b/src/ansiblelint/testing/__init__.py @@ -0,0 +1,147 @@ +"""Test utils for ansible-lint.""" +from __future__ import annotations + +import os +import shutil +import subprocess +import sys +import tempfile +from typing import TYPE_CHECKING, Any + +from ansiblelint.app import get_app +from ansiblelint.rules import RulesCollection + +if TYPE_CHECKING: + # https://github.com/PyCQA/pylint/issues/3240 + # pylint: disable=unsubscriptable-object + CompletedProcess = subprocess.CompletedProcess[Any] + from ansiblelint.errors import MatchError # noqa: E402 +else: + CompletedProcess = subprocess.CompletedProcess + +# pylint: disable=wrong-import-position +from ansiblelint.runner import Runner # noqa: E402 + + +class RunFromText: + """Use Runner on temp files created from testing text snippets.""" + + app = None + + def __init__(self, collection: RulesCollection) -> None: + """Initialize a RunFromText instance with rules collection.""" + # Emulate command line execution initialization as without it Ansible module + # would be loaded with incomplete module/role/collection list. + if not self.app: # pragma: no cover + self.app = get_app() + + self.collection = collection + + def _call_runner(self, path: str) -> list[MatchError]: + runner = Runner(path, rules=self.collection) + return runner.run() + + def run(self, filename: str) -> list[MatchError]: + """Lints received filename.""" + return self._call_runner(filename) + + def run_playbook( + self, playbook_text: str, prefix: str = "playbook" + ) -> list[MatchError]: + """Lints received text as a playbook.""" + with tempfile.NamedTemporaryFile(mode="w", suffix=".yml", prefix=prefix) as fh: + fh.write(playbook_text) + fh.flush() + results = self._call_runner(fh.name) + return results + + def run_role_tasks_main(self, tasks_main_text: str) -> list[MatchError]: + """Lints received text as tasks.""" + role_path = tempfile.mkdtemp(prefix="role_") + tasks_path = os.path.join(role_path, "tasks") + os.makedirs(tasks_path) + with open(os.path.join(tasks_path, "main.yml"), "w", encoding="utf-8") as fh: + fh.write(tasks_main_text) + fh.flush() + results = self._call_runner(role_path) + shutil.rmtree(role_path) + return results + + def run_role_meta_main(self, meta_main_text: str) -> list[MatchError]: + """Lints received text as meta.""" + role_path = tempfile.mkdtemp(prefix="role_") + meta_path = os.path.join(role_path, "meta") + os.makedirs(meta_path) + with open(os.path.join(meta_path, "main.yml"), "w", encoding="utf-8") as fh: + fh.write(meta_main_text) + fh.flush() + results = self._call_runner(role_path) + shutil.rmtree(role_path) + return results + + def run_role_defaults_main(self, defaults_main_text: str) -> list[MatchError]: + """Lints received text as vars file in defaults.""" + role_path = tempfile.mkdtemp(prefix="role_") + defaults_path = os.path.join(role_path, "defaults") + os.makedirs(defaults_path) + with open(os.path.join(defaults_path, "main.yml"), "w", encoding="utf-8") as fh: + fh.write(defaults_main_text) + fh.flush() + results = self._call_runner(role_path) + shutil.rmtree(role_path) + return results + + +def run_ansible_lint( + *argv: str, + cwd: str | None = None, + executable: str | None = None, + env: dict[str, str] | None = None, + offline: bool = True, +) -> CompletedProcess: 
+ """Run ansible-lint on a given path and returns its output.""" + args = [*argv] + if offline: # pragma: no cover + args.insert(0, "--offline") + + if not executable: + executable = sys.executable + args = [sys.executable, "-m", "ansiblelint", *args] + else: + args = [executable, *args] + + # It is not safe to pass entire env for testing as other tests would + # pollute the env, causing weird behaviors, so we pass only a safe list of + # vars. + safe_list = [ + "COVERAGE_FILE", + "COVERAGE_PROCESS_START", + "HOME", + "LANG", + "LC_ALL", + "LC_CTYPE", + "NO_COLOR", + "PATH", + "PYTHONIOENCODING", + "PYTHONPATH", + "TERM", + "VIRTUAL_ENV", + ] + + if env is None: + _env = {} + else: + _env = env + for v in safe_list: + if v in os.environ and v not in _env: + _env[v] = os.environ[v] + + return subprocess.run( + args, + capture_output=True, + shell=False, # needed when command is a list + check=False, + cwd=cwd, + env=_env, + text=True, + ) diff --git a/src/ansiblelint/testing/fixtures.py b/src/ansiblelint/testing/fixtures.py new file mode 100644 index 0000000..bf7160f --- /dev/null +++ b/src/ansiblelint/testing/fixtures.py @@ -0,0 +1,55 @@ +"""PyTest Fixtures. + +They should not be imported, instead add code below to your root conftest.py +file: + +pytest_plugins = ['ansiblelint.testing'] +""" +from __future__ import annotations + +import copy +import os +from argparse import Namespace +from typing import Iterator + +import pytest +from _pytest.fixtures import SubRequest + +from ansiblelint.config import options # noqa: F401 +from ansiblelint.constants import DEFAULT_RULESDIR +from ansiblelint.rules import RulesCollection +from ansiblelint.testing import RunFromText + + +@pytest.fixture(name="default_rules_collection") +def fixture_default_rules_collection() -> RulesCollection: + """Return default rule collection.""" + assert os.path.isdir(DEFAULT_RULESDIR) + # For testing we want to manually enable opt-in rules + options.enable_list = ["no-same-owner"] + return RulesCollection(rulesdirs=[DEFAULT_RULESDIR], options=options) + + +@pytest.fixture +def default_text_runner(default_rules_collection: RulesCollection) -> RunFromText: + """Return RunFromText instance for the default set of collections.""" + return RunFromText(default_rules_collection) + + +@pytest.fixture +def rule_runner(request: SubRequest, config_options: Namespace) -> RunFromText: + """Return runner for a specific rule class.""" + rule_class = request.param + config_options.enable_list.append(rule_class().id) + collection = RulesCollection(options=config_options) + collection.register(rule_class()) + return RunFromText(collection) + + +@pytest.fixture(name="config_options") +def fixture_config_options() -> Iterator[Namespace]: + """Return configuration options that will be restored after testrun.""" + global options # pylint: disable=global-statement,invalid-name + original_options = copy.deepcopy(options) + yield options + options = original_options diff --git a/src/ansiblelint/text.py b/src/ansiblelint/text.py new file mode 100644 index 0000000..12ec9ad --- /dev/null +++ b/src/ansiblelint/text.py @@ -0,0 +1,50 @@ +"""Text utils.""" +from __future__ import annotations + +import re +from functools import lru_cache + +RE_HAS_JINJA = re.compile(r"{[{%#].*[%#}]}", re.DOTALL) +RE_HAS_GLOB = re.compile("[][*?]") + + +def strip_ansi_escape(data: str | bytes) -> str: + """Remove all ANSI escapes from string or bytes. + + If bytes is passed instead of string, it will be converted to string + using UTF-8. 
+ """ + if isinstance(data, bytes): # pragma: no branch + data = data.decode("utf-8") + + return re.sub(r"\x1b[^m]*m", "", data) + + +def toidentifier(text: str) -> str: + """Convert unsafe chars to ones allowed in variables.""" + result = re.sub(r"[\s-]+", "_", text) + if not result.isidentifier(): + raise RuntimeError( + f"Unable to convert role name '{text}' to valid variable name." + ) + return result + + +# https://www.python.org/dev/peps/pep-0616/ +def removeprefix(self: str, prefix: str) -> str: + """Remove prefix from string.""" + if self.startswith(prefix): + return self[len(prefix) :] + return self[:] + + +@lru_cache(maxsize=None) +def has_jinja(value: str) -> bool: + """Return true if a string seems to contain jinja templating.""" + return bool(isinstance(value, str) and RE_HAS_JINJA.search(value)) + + +@lru_cache(maxsize=None) +def has_glob(value: str) -> bool: + """Return true if a string looks like having a glob pattern.""" + return bool(isinstance(value, str) and RE_HAS_GLOB.search(value)) diff --git a/src/ansiblelint/transformer.py b/src/ansiblelint/transformer.py new file mode 100644 index 0000000..097c7ca --- /dev/null +++ b/src/ansiblelint/transformer.py @@ -0,0 +1,141 @@ +"""Transformer implementation.""" +from __future__ import annotations + +import logging +from argparse import Namespace +from typing import Union, cast + +from ruamel.yaml.comments import CommentedMap, CommentedSeq + +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule, TransformMixin +from ansiblelint.runner import LintResult +from ansiblelint.yaml_utils import FormattedYAML, get_path_to_play, get_path_to_task + +__all__ = ["Transformer"] + +_logger = logging.getLogger(__name__) + + +# pylint: disable=too-few-public-methods +class Transformer: + """Transformer class marshals transformations. + + The Transformer is similar to the ``ansiblelint.runner.Runner`` which manages + running each of the rules. We only expect there to be one ``Transformer`` instance + which should be instantiated from the main entrypoint function. + + In the future, the transformer will be responsible for running transforms for each + of the rule matches. For now, it just reads/writes YAML files which is a + pre-requisite for the planned rule-specific transforms. + """ + + def __init__(self, result: LintResult, options: Namespace): + """Initialize a Transformer instance.""" + self.write_set = self.effective_write_set(options.write_list) + + self.matches: list[MatchError] = result.matches + self.files: set[Lintable] = result.files + + lintables: dict[str, Lintable] = {file.filename: file for file in result.files} + self.matches_per_file: dict[Lintable, list[MatchError]] = { + file: [] for file in result.files + } + + for match in self.matches: + try: + lintable = lintables[match.filename] + except KeyError: + # we shouldn't get here, but this is easy to recover from so do that. + lintable = Lintable(match.filename) + self.matches_per_file[lintable] = [] + self.matches_per_file[lintable].append(match) + + @staticmethod + def effective_write_set(write_list: list[str]) -> set[str]: + """Simplify write_list based on ``"none"`` and ``"all"`` keywords. + + ``"none"`` resets the enabled rule transforms. + This returns ``{"none"}`` or a set of everything after the last ``"none"``. + + If ``"all"`` is in the ``write_list`` (after ``"none"`` if present), + then this will return ``{"all"}``. 
+ """ + none_indexes = [i for i, value in enumerate(write_list) if value == "none"] + if none_indexes: + index = none_indexes[-1] + if len(write_list) > index + 1: + index += 1 + write_list = write_list[index:] + if "all" in write_list: + return {"all"} + return set(write_list) + + def run(self) -> None: + """For each file, read it, execute transforms on it, then write it.""" + for file, matches in self.matches_per_file.items(): + # str() convinces mypy that "text/yaml" is a valid Literal. + # Otherwise, it thinks base_kind is one of playbook, meta, tasks, ... + file_is_yaml = str(file.base_kind) == "text/yaml" + + try: + data: str = file.content + except (UnicodeDecodeError, IsADirectoryError): + # we hit a binary file (eg a jar or tar.gz) or a directory + data = "" + file_is_yaml = False + + ruamel_data: CommentedMap | CommentedSeq | None = None + if file_is_yaml: + # We need a fresh YAML() instance for each load because ruamel.yaml + # stores intermediate state during load which could affect loading + # any other files. (Based on suggestion from ruamel.yaml author) + yaml = FormattedYAML() + + ruamel_data = yaml.loads(data) + if not isinstance(ruamel_data, (CommentedMap, CommentedSeq)): + # This is an empty vars file or similar which loads as None. + # It is not safe to write this file or data-loss is likely. + # Only maps and sequences can preserve comments. Skip it. + continue + + if self.write_set != {"none"}: + self._do_transforms(file, ruamel_data or data, file_is_yaml, matches) + + if file_is_yaml: + # noinspection PyUnboundLocalVariable + file.content = yaml.dumps(ruamel_data) + + if file.updated: + file.write() + + def _do_transforms( + self, + file: Lintable, + data: CommentedMap | CommentedSeq | str, + file_is_yaml: bool, + matches: list[MatchError], + ) -> None: + """Do Rule-Transforms handling any last-minute MatchError inspections.""" + for match in sorted(matches): + if not isinstance(match.rule, TransformMixin): + continue + if self.write_set != {"all"}: + rule = cast(AnsibleLintRule, match.rule) + rule_definition = set(rule.tags) + rule_definition.add(rule.id) + if rule_definition.isdisjoint(self.write_set): + # rule transform not requested. Skip it. + continue + if file_is_yaml and not match.yaml_path: + data = cast(Union[CommentedMap, CommentedSeq], data) + if match.match_type == "play": + match.yaml_path = get_path_to_play(file, match.linenumber, data) + elif match.task or file.kind in ( + "tasks", + "handlers", + "playbook", + ): + match.yaml_path = get_path_to_task(file, match.linenumber, data) + match.rule.transform(match, file, data) diff --git a/src/ansiblelint/utils.py b/src/ansiblelint/utils.py new file mode 100644 index 0000000..3e71e91 --- /dev/null +++ b/src/ansiblelint/utils.py @@ -0,0 +1,914 @@ +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# spell-checker:ignore dwim +"""Generic utility helpers.""" +from __future__ import annotations + +import contextlib +import inspect +import logging +import os +import re +import warnings +from argparse import Namespace +from collections.abc import ItemsView, Mapping +from functools import lru_cache +from pathlib import Path +from typing import Any, Callable, Generator, Sequence + +import yaml +from ansible.errors import AnsibleError, AnsibleParserError +from ansible.module_utils.parsing.convert_bool import boolean +from ansible.parsing.dataloader import DataLoader +from ansible.parsing.mod_args import ModuleArgsParser +from ansible.parsing.splitter import split_args +from ansible.parsing.yaml.constructor import AnsibleConstructor, AnsibleMapping +from ansible.parsing.yaml.loader import AnsibleLoader +from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleSequence +from ansible.plugins.loader import add_all_plugin_dirs +from ansible.template import Templar +from ansible.utils.collection_loader import AnsibleCollectionConfig +from yaml.composer import Composer +from yaml.representer import RepresenterError + +from ansiblelint._internal.rules import ( + AnsibleParserErrorRule, + LoadingFailureRule, + RuntimeErrorRule, +) +from ansiblelint.app import get_app +from ansiblelint.config import options +from ansiblelint.constants import ( + FILENAME_KEY, + INCLUSION_ACTION_NAMES, + LINE_NUMBER_KEY, + NESTED_TASK_KEYS, + PLAYBOOK_TASK_KEYWORDS, + ROLE_IMPORT_ACTION_NAMES, + SKIPPED_RULES_KEY, + FileType, +) +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable, discover_lintables +from ansiblelint.skip_utils import is_nested_task +from ansiblelint.text import removeprefix + +# ansible-lint doesn't need/want to know about encrypted secrets, so we pass a +# string as the password to enable such yaml files to be opened and parsed +# successfully. +DEFAULT_VAULT_PASSWORD = "x" +COLLECTION_PLAY_RE = re.compile(r"^[\w\d_]+\.[\w\d_]+\.[\w\d_]+$") + +PLAYBOOK_DIR = os.environ.get("ANSIBLE_PLAYBOOK_DIR", None) + + +_logger = logging.getLogger(__name__) + + +def parse_yaml_from_file(filepath: str) -> AnsibleBaseYAMLObject: + """Extract a decrypted YAML object from file.""" + dataloader = DataLoader() + if hasattr(dataloader, "set_vault_password"): + dataloader.set_vault_password(DEFAULT_VAULT_PASSWORD) + return dataloader.load_from_file(filepath) + + +def path_dwim(basedir: str, given: str) -> str: + """Convert a given path do-what-I-mean style.""" + dataloader = DataLoader() + dataloader.set_basedir(basedir) + return str(dataloader.path_dwim(given)) + + +def ansible_templar(basedir: str, templatevars: Any) -> Templar: + """Create an Ansible Templar using templatevars.""" + # `basedir` is the directory containing the lintable file. + # Therefore, for tasks in a role, `basedir` has the form + # `roles/some_role/tasks`. On the other hand, the search path + # is `roles/some_role/{files,templates}`. As a result, the + # `tasks` part in the basedir should be stripped stripped. 
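+ # e.g. "roles/some_role/tasks" becomes "roles/some_role", so the files/ and templates/ lookups resolve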
+ if os.path.basename(basedir) == "tasks": + basedir = os.path.dirname(basedir) + + dataloader = DataLoader() + dataloader.set_basedir(basedir) + templar = Templar(dataloader, variables=templatevars) + return templar + + +def ansible_template( + basedir: str, varname: Any, templatevars: Any, **kwargs: Any +) -> Any: + """Render a templated string by mocking missing filters.""" + templar = ansible_templar(basedir=basedir, templatevars=templatevars) + # pylint: disable=unused-variable + for i in range(3): + try: + kwargs["disable_lookups"] = True + return templar.template(varname, **kwargs) + except AnsibleError as exc: + if ( + "was found, however lookups were disabled from templating" + in exc.message + ): + # ansible core does an early exit when disable_lookup=True but + # this happens after the jinja2 syntax already passed. + break + if ( + exc.message.startswith("template error while templating string:") + and "'" in exc.message + ): + missing_filter = exc.message.split("'")[1] + if missing_filter == "end of print statement": + raise + # Mock the filter to avoid and error from Ansible templating + # pylint: disable=protected-access + templar.environment.filters._delegatee[missing_filter] = lambda x: x + # Record the mocked filter so we can warn the user + if missing_filter not in options.mock_filters: + _logger.debug("Mocking missing filter %s", missing_filter) + options.mock_filters.append(missing_filter) + continue + raise + + +BLOCK_NAME_TO_ACTION_TYPE_MAP = { + "tasks": "task", + "handlers": "handler", + "pre_tasks": "task", + "post_tasks": "task", + "block": "meta", + "rescue": "meta", + "always": "meta", +} + + +def tokenize(line: str) -> tuple[str, list[str], dict[str, str]]: + """Parse a string task invocation.""" + tokens = line.lstrip().split(" ") + if tokens[0] == "-": + tokens = tokens[1:] + if tokens[0] == "action:" or tokens[0] == "local_action:": + tokens = tokens[1:] + command = tokens[0].replace(":", "") + + args = [] + kwargs = {} + non_kv_found = False + for arg in tokens[1:]: + if "=" in arg and not non_kv_found: + key_value = arg.split("=", 1) + kwargs[key_value[0]] = key_value[1] + else: + non_kv_found = True + args.append(arg) + return (command, args, kwargs) + + +def _playbook_items(pb_data: AnsibleBaseYAMLObject) -> ItemsView: # type: ignore + if isinstance(pb_data, dict): + return pb_data.items() + if not pb_data: + return [] # type: ignore + + # "if play" prevents failure if the play sequence contains None, + # which is weird but currently allowed by Ansible + # https://github.com/ansible/ansible-lint/issues/849 + return [item for play in pb_data if play for item in play.items()] # type: ignore + + +def _set_collections_basedir(basedir: str) -> None: + # Sets the playbook directory as playbook_paths for the collection loader + AnsibleCollectionConfig.playbook_paths = basedir + + +def find_children(lintable: Lintable) -> list[Lintable]: # noqa: C901 + """Traverse children of a single file or folder.""" + if not lintable.path.exists(): + return [] + playbook_dir = str(lintable.path.parent) + _set_collections_basedir(playbook_dir or os.path.abspath(".")) + add_all_plugin_dirs(playbook_dir or ".") + if lintable.kind == "role": + playbook_ds = AnsibleMapping({"roles": [{"role": str(lintable.path)}]}) + elif lintable.kind not in ("playbook", "tasks"): + return [] + else: + try: + playbook_ds = parse_yaml_from_file(str(lintable.path)) + except AnsibleError as exc: + raise SystemExit(exc) from exc + results = [] + basedir = os.path.dirname(str(lintable.path)) + # 
playbook_ds can be an AnsibleUnicode string, which we consider invalid + if isinstance(playbook_ds, str): + raise MatchError(filename=lintable, rule=LoadingFailureRule()) + for item in _playbook_items(playbook_ds): + # if lintable.kind not in ["playbook"]: + # continue + for child in play_children(basedir, item, lintable.kind, playbook_dir): + # We avoid processing parametrized children + path_str = str(child.path) + if "$" in path_str or "{{" in path_str: + continue + + # Repair incorrect paths obtained when old syntax was used, like: + # - include: simpletask.yml tags=nginx + valid_tokens = [] + for token in split_args(path_str): + if "=" in token: + break + valid_tokens.append(token) + path = " ".join(valid_tokens) + if path != path_str: + child.path = Path(path) + child.name = child.path.name + + results.append(child) + return results + + +def template( + basedir: str, + value: Any, + variables: Any, + fail_on_error: bool = False, + fail_on_undefined: bool = False, + **kwargs: str, +) -> Any: + """Attempt rendering a value with known vars.""" + try: + value = ansible_template( + os.path.abspath(basedir), + value, + variables, + **dict(kwargs, fail_on_undefined=fail_on_undefined), + ) + # Hack to skip the following exception when using to_json filter on a variable. + # I guess the filter doesn't like empty vars... + except (AnsibleError, ValueError, RepresenterError): + # templating failed, so just keep value as is. + if fail_on_error: + raise + return value + + +def play_children( + basedir: str, item: tuple[str, Any], parent_type: FileType, playbook_dir: str +) -> list[Lintable]: + """Flatten the traversed play tasks.""" + # pylint: disable=unused-argument + delegate_map: dict[str, Callable[[str, Any, Any, FileType], list[Lintable]]] = { + "tasks": _taskshandlers_children, + "pre_tasks": _taskshandlers_children, + "post_tasks": _taskshandlers_children, + "block": _taskshandlers_children, + "include": _include_children, + "ansible.builtin.include": _include_children, + "import_playbook": _include_children, + "ansible.builtin.import_playbook": _include_children, + "roles": _roles_children, + "dependencies": _roles_children, + "handlers": _taskshandlers_children, + "include_tasks": _include_children, + "ansible.builtin.include_tasks": _include_children, + "import_tasks": _include_children, + "ansible.builtin.import_tasks": _include_children, + } + (k, v) = item + add_all_plugin_dirs(os.path.abspath(basedir)) + + if k in delegate_map: + if v: + v = template( + os.path.abspath(basedir), + v, + {"playbook_dir": PLAYBOOK_DIR or os.path.abspath(basedir)}, + fail_on_undefined=False, + ) + return delegate_map[k](basedir, k, v, parent_type) + return [] + + +def _include_children( + basedir: str, k: str, v: Any, parent_type: FileType +) -> list[Lintable]: + # handle special case include_tasks: name=filename.yml + if k in INCLUSION_ACTION_NAMES and isinstance(v, dict) and "file" in v: + v = v["file"] + + # we cannot really parse any jinja2 in includes, so we ignore them + if not v or "{{" in v: + return [] + + if "import_playbook" in k and COLLECTION_PLAY_RE.match(v): + # Any import_playbooks from collections should be ignored as ansible + # own syntax check will handle them. 
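+ # (COLLECTION_PLAY_RE above matches fully-qualified "namespace.collection.playbook" names)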
+ return [] + + # handle include: filename.yml tags=blah + # pylint: disable=unused-variable + (command, args, kwargs) = tokenize(f"{k}: {v}") + + result = path_dwim(basedir, args[0]) + while basedir not in ["", "/"]: + if os.path.exists(result): + break + basedir = os.path.dirname(basedir) + result = path_dwim(basedir, args[0]) + + return [Lintable(result, kind=parent_type)] + + +def _taskshandlers_children( + basedir: str, k: str, v: None | Any, parent_type: FileType +) -> list[Lintable]: + results: list[Lintable] = [] + if v is None: + raise MatchError( + message="A malformed block was encountered while loading a block.", + rule=RuntimeErrorRule(), + ) + for task_handler in v: + # ignore empty tasks, `-` + if not task_handler: + continue + + with contextlib.suppress(LookupError): + children = _get_task_handler_children_for_tasks_or_playbooks( + task_handler, + basedir, + k, + parent_type, + ) + results.append(children) + continue + + if any(x in task_handler for x in ROLE_IMPORT_ACTION_NAMES): + # lgtm [py/unreachable-statement] + task_handler = normalize_task_v2(task_handler) + _validate_task_handler_action_for_role(task_handler["action"]) + results.extend( + _roles_children( + basedir, + k, + [task_handler["action"].get("name")], + parent_type, + main=task_handler["action"].get("tasks_from", "main"), + ) + ) + continue + + if "block" not in task_handler: + continue + + results.extend( + _taskshandlers_children(basedir, k, task_handler["block"], parent_type) + ) + if "rescue" in task_handler: + results.extend( + _taskshandlers_children(basedir, k, task_handler["rescue"], parent_type) + ) + if "always" in task_handler: + results.extend( + _taskshandlers_children(basedir, k, task_handler["always"], parent_type) + ) + + return results + + +def _get_task_handler_children_for_tasks_or_playbooks( + task_handler: dict[str, Any], + basedir: str, + k: Any, + parent_type: FileType, +) -> Lintable: + """Try to get children of taskhandler for include/import tasks/playbooks.""" + child_type = k if parent_type == "playbook" else parent_type + + # Include the FQCN task names as this happens before normalize + for task_handler_key in INCLUSION_ACTION_NAMES: + with contextlib.suppress(KeyError): + # ignore empty tasks + if not task_handler: # pragma: no branch + continue + + file_name = task_handler[task_handler_key] + if isinstance(file_name, Mapping) and file_name.get("file", None): + file_name = file_name["file"] + + f = path_dwim(basedir, file_name) + while basedir not in ["", "/"]: + if os.path.exists(f): + break + basedir = os.path.dirname(basedir) + f = path_dwim(basedir, file_name) + return Lintable(f, kind=child_type) + + raise LookupError( + f'The node contains none of: {", ".join(sorted(INCLUSION_ACTION_NAMES))}', + ) + + +def _validate_task_handler_action_for_role(th_action: dict[str, Any]) -> None: + """Verify that the task handler action is valid for role include.""" + module = th_action["__ansible_module__"] + + if "name" not in th_action: + raise MatchError(message=f"Failed to find required 'name' key in {module!s}") + + if not isinstance(th_action["name"], str): + raise MatchError( + message=f"Value assigned to 'name' key on '{module!s}' is not a string.", + ) + + +def _roles_children( + basedir: str, k: str, v: Sequence[Any], parent_type: FileType, main: str = "main" +) -> list[Lintable]: + # pylint: disable=unused-argument # parent_type) + results: list[Lintable] = [] + if not v: + # typing does not prevent junk from being passed in + return results + for role in v: + if 
isinstance(role, dict): + if "role" in role or "name" in role: + if "tags" not in role or "skip_ansible_lint" not in role["tags"]: + results.extend( + _look_for_role_files( + basedir, role.get("role", role.get("name")), main=main + ) + ) + elif k != "dependencies": + raise SystemExit( + f'role dict {role} does not contain a "role" or "name" key' + ) + else: + results.extend(_look_for_role_files(basedir, role, main=main)) + return results + + +def _rolepath(basedir: str, role: str) -> str | None: + role_path = None + + possible_paths = [ + # if included from a playbook + path_dwim(basedir, os.path.join("roles", role)), + path_dwim(basedir, role), + # if included from roles/[role]/meta/main.yml + path_dwim(basedir, os.path.join("..", "..", "..", "roles", role)), + path_dwim(basedir, os.path.join("..", "..", role)), + # if checking a role in the current directory + path_dwim(basedir, os.path.join("..", role)), + ] + + for loc in get_app().runtime.config.default_roles_path: + loc = os.path.expanduser(loc) + possible_paths.append(path_dwim(loc, role)) + + possible_paths.append(path_dwim(basedir, "")) + + for path_option in possible_paths: # pragma: no branch + if os.path.isdir(path_option): + role_path = path_option + break + + if role_path: # pragma: no branch + add_all_plugin_dirs(role_path) + + return role_path + + +def _look_for_role_files( + basedir: str, role: str, main: str | None = "main" +) -> list[Lintable]: + # pylint: disable=unused-argument # main + role_path = _rolepath(basedir, role) + if not role_path: # pragma: no branch + return [] + + results = [] + + for kind in ["tasks", "meta", "handlers", "vars", "defaults"]: + current_path = os.path.join(role_path, kind) + for folder, _, files in os.walk(current_path): + for file in files: + file_ignorecase = file.lower() + if file_ignorecase.endswith((".yml", ".yaml")): + results.append(Lintable(os.path.join(folder, file))) + + return results + + +def _kv_to_dict(v: str) -> dict[str, Any]: + (command, args, kwargs) = tokenize(v) + return {"__ansible_module__": command, "__ansible_arguments__": args, **kwargs} + + +def _sanitize_task(task: dict[str, Any]) -> dict[str, Any]: + """Return a stripped-off task structure compatible with new Ansible. + + This helper takes a copy of the incoming task and drops + any internally used keys from it. + """ + result = task.copy() + # task is an AnsibleMapping which inherits from OrderedDict, so we need + # to use `del` to remove unwanted keys. 
+ for k in [SKIPPED_RULES_KEY, FILENAME_KEY, LINE_NUMBER_KEY]: + if k in result: + del result[k] + return result + + +def _extract_ansible_parsed_keys_from_task( + result: dict[str, Any], + task: dict[str, Any], + keys: tuple[str, ...], +) -> dict[str, Any]: + """Return a dict with existing key in task.""" + for k, v in list(task.items()): + if k in keys: + # we don't want to re-assign these values, which were + # determined by the ModuleArgsParser() above + continue + result[k] = v + return result + + +def normalize_task_v2(task: dict[str, Any]) -> dict[str, Any]: + """Ensure tasks have a normalized action key and strings are converted to python objects.""" + result: dict[str, Any] = {} + ansible_parsed_keys = ("action", "local_action", "args", "delegate_to") + + if is_nested_task(task): + _extract_ansible_parsed_keys_from_task(result, task, ansible_parsed_keys) + # Add dummy action for block/always/rescue statements + result["action"] = { + "__ansible_module__": "block/always/rescue", + "__ansible_module_original__": "block/always/rescue", + } + + return result + + sanitized_task = _sanitize_task(task) + mod_arg_parser = ModuleArgsParser(sanitized_task) + + try: + action, arguments, result["delegate_to"] = mod_arg_parser.parse( + skip_action_validation=options.skip_action_validation + ) + except AnsibleParserError as exc: + # pylint: disable=raise-missing-from + raise MatchError( + rule=AnsibleParserErrorRule(), + message=exc.message, + filename=task.get(FILENAME_KEY, "Unknown"), + linenumber=task.get(LINE_NUMBER_KEY, 0), + ) + + # denormalize shell -> command conversion + if "_uses_shell" in arguments: + action = "shell" + del arguments["_uses_shell"] + + _extract_ansible_parsed_keys_from_task( + result, task, ansible_parsed_keys + (action,) + ) + + if not isinstance(action, str): + raise RuntimeError(f"Task actions can only be strings, got {action}") + action_unnormalized = action + # convert builtin fqn calls to short forms because most rules know only + # about short calls but in the future we may switch the normalization to do + # the opposite. Mainly we currently consider normalized the module listing + # used by `ansible-doc -t module -l 2>/dev/null` + action = removeprefix(action, "ansible.builtin.") + result["action"] = { + "__ansible_module__": action, + "__ansible_module_original__": action_unnormalized, + } + + if "_raw_params" in arguments: + # Doing a split here is really bad as it would break jinja2 templating + # parsing of the template must happen before any kind of split. 
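+        # That is why the raw parameter string is kept below as a single,
+        # untouched list element instead of being split into words.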
+ result["action"]["__ansible_arguments__"] = [arguments["_raw_params"]] + del arguments["_raw_params"] + else: + result["action"]["__ansible_arguments__"] = [] + + if "argv" in arguments and not result["action"]["__ansible_arguments__"]: + result["action"]["__ansible_arguments__"] = arguments["argv"] + del arguments["argv"] + + result["action"].update(arguments) + return result + + +def normalize_task(task: dict[str, Any], filename: str) -> dict[str, Any]: + """Unify task-like object structures.""" + ansible_action_type = task.get("__ansible_action_type__", "task") + if "__ansible_action_type__" in task: + del task["__ansible_action_type__"] + task = normalize_task_v2(task) + task[FILENAME_KEY] = filename + task["__ansible_action_type__"] = ansible_action_type + return task + + +def task_to_str(task: dict[str, Any]) -> str: + """Make a string identifier for the given task.""" + name = task.get("name") + if name: + return str(name) + action = task.get("action") + if isinstance(action, str) or not isinstance(action, dict): + return str(action) + args = [ + f"{k}={v}" + for (k, v) in action.items() + if k + not in [ + "__ansible_module__", + "__ansible_module_original__", + "__ansible_arguments__", + LINE_NUMBER_KEY, + FILENAME_KEY, + ] + ] + + for item in action.get("__ansible_arguments__", []): + args.append(str(item)) + + return f"{action['__ansible_module__']} {' '.join(args)}" + + +def extract_from_list( + blocks: AnsibleBaseYAMLObject, candidates: list[str], recursive: bool = False +) -> list[Any]: + """Get action tasks from block structures.""" + results = [] + for block in blocks: + for candidate in candidates: + if isinstance(block, dict) and candidate in block: + if isinstance(block[candidate], list): + subresults = add_action_type(block[candidate], candidate) + if recursive: + subresults.extend( + extract_from_list(subresults, candidates, recursive) + ) + results.extend(subresults) + elif block[candidate] is not None: + raise RuntimeError( + f"Key '{candidate}' defined, but bad value: '{str(block[candidate])}'" + ) + return results + + +def add_action_type(actions: AnsibleBaseYAMLObject, action_type: str) -> list[Any]: + """Add action markers to task objects.""" + results = [] + for action in actions: + # ignore empty task + if not action: + continue + action["__ansible_action_type__"] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type] + results.append(action) + return results + + +def get_action_tasks(data: AnsibleBaseYAMLObject, file: Lintable) -> list[Any]: + """Get a flattened list of action tasks from the file.""" + tasks = [] + if file.kind in ["tasks", "handlers"]: + tasks = add_action_type(data, file.kind) + else: + tasks.extend(extract_from_list(data, PLAYBOOK_TASK_KEYWORDS)) + + # Add sub-elements of block/rescue/always to tasks list + tasks.extend(extract_from_list(tasks, NESTED_TASK_KEYS, recursive=True)) + + return tasks + + +@lru_cache(maxsize=None) +def parse_yaml_linenumbers( # noqa: max-complexity: 12 + lintable: Lintable, +) -> AnsibleBaseYAMLObject: + """Parse yaml as ansible.utils.parse_yaml but with linenumbers. + + The line numbers are stored in each node's LINE_NUMBER_KEY key. 
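+
+    Multi-document files are also accepted: a list of documents is returned when
+    more than one is present, and an empty file yields ``None``. A minimal
+    sketch of reading the annotations back (assuming ``lintable`` points at a
+    playbook loaded elsewhere):
+
+    .. code-block:: python
+
+        data = parse_yaml_linenumbers(lintable)
+        for task in data[0].get("tasks") or []:
+            print(task[LINE_NUMBER_KEY], task.get("name"))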
+ """ + result = [] + + def compose_node(parent: yaml.nodes.Node, index: int) -> yaml.nodes.Node: + # the line number where the previous token has ended (plus empty lines) + line = loader.line + node = Composer.compose_node(loader, parent, index) + if not isinstance(node, yaml.nodes.Node): + raise RuntimeError("Unexpected yaml data.") + setattr(node, "__line__", line + 1) + return node + + def construct_mapping( + node: AnsibleBaseYAMLObject, deep: bool = False + ) -> AnsibleMapping: + mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep) + if hasattr(node, "__line__"): + mapping[LINE_NUMBER_KEY] = node.__line__ + else: + mapping[ + LINE_NUMBER_KEY + ] = mapping._line_number # pylint: disable=protected-access + mapping[FILENAME_KEY] = lintable.path + return mapping + + try: + kwargs = {} + if "vault_password" in inspect.getfullargspec(AnsibleLoader.__init__).args: + kwargs["vault_password"] = DEFAULT_VAULT_PASSWORD + loader = AnsibleLoader(lintable.content, **kwargs) + loader.compose_node = compose_node + loader.construct_mapping = construct_mapping + # while Ansible only accepts single documents, we also need to load + # multi-documents, as we attempt to load any YAML file, not only + # Ansible managed ones. + while True: + data = loader.get_data() + if data is None: + break + result.append(data) + except ( + yaml.parser.ParserError, + yaml.scanner.ScannerError, + yaml.constructor.ConstructorError, + ) as exc: + raise RuntimeError("Failed to load YAML file") from exc + + if len(result) == 0: + return None # empty documents + if len(result) == 1: + return result[0] + return result + + +def get_first_cmd_arg(task: dict[str, Any]) -> Any: + """Extract the first arg from a cmd task.""" + try: + if "cmd" in task["action"]: + first_cmd_arg = task["action"]["cmd"].split()[0] + else: + first_cmd_arg = task["action"]["__ansible_arguments__"][0].split()[0] + except IndexError: + return None + return first_cmd_arg + + +def get_second_cmd_arg(task: dict[str, Any]) -> Any: + """Extract the second arg from a cmd task.""" + try: + if "cmd" in task["action"]: + second_cmd_arg = task["action"]["cmd"].split()[1] + else: + second_cmd_arg = task["action"]["__ansible_arguments__"][0].split()[1] + except IndexError: + return None + return second_cmd_arg + + +def is_playbook(filename: str) -> bool: + """ + Check if the file is a playbook. + + Given a filename, it should return true if it looks like a playbook. The + function is not supposed to raise exceptions. 
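+
+    A short usage sketch (``site.yml`` is only a hypothetical path):
+
+    .. code-block:: python
+
+        if is_playbook("site.yml"):
+            kind = "playbook"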
+ """ + # we assume is a playbook if we loaded a sequence of dictionaries where + # at least one of these keys is present: + playbooks_keys = { + "gather_facts", + "hosts", + "import_playbook", + "post_tasks", + "pre_tasks", + "roles", + "tasks", + } + + # makes it work with Path objects by converting them to strings + if not isinstance(filename, str): + filename = str(filename) + + try: + f = parse_yaml_from_file(filename) + except Exception as exc: # pylint: disable=broad-except + _logger.warning( + "Failed to load %s with %s, assuming is not a playbook.", filename, exc + ) + else: + if ( + isinstance(f, AnsibleSequence) + and hasattr(next(iter(f), {}), "keys") + and playbooks_keys.intersection(next(iter(f), {}).keys()) + ): + return True + return False + + +# pylint: disable=too-many-statements +def get_lintables( + opts: Namespace = Namespace(), args: list[str] | None = None +) -> list[Lintable]: + """Detect files and directories that are lintable.""" + lintables: list[Lintable] = [] + + # passing args bypass auto-detection mode + if args: + for arg in args: + lintable = Lintable(arg) + lintables.append(lintable) + else: + for filename in discover_lintables(opts): + path = Path(filename) + # skip exclusions + try: + for file_path in opts.exclude_paths: + if str(path.resolve()).startswith(str(file_path)): + raise FileNotFoundError( + f"File {file_path} matched exclusion entry: {path}" + ) + except FileNotFoundError as exc: + _logger.debug("Ignored %s due to: %s", path, exc) + continue + + if path.is_symlink() and not path.exists(): + _logger.warning("Ignored broken symlink %s -> %s", path, path.resolve()) + continue + + lintables.append(Lintable(path)) + + # stage 2: guess roles from current lintables, as there is no unique + # file that must be present in any kind of role. + _extend_with_roles(lintables) + + return lintables + + +def _extend_with_roles(lintables: list[Lintable]) -> None: + """Detect roles among lintables and adds them to the list.""" + for lintable in lintables: + parts = lintable.path.parent.parts + if "roles" in parts: + role = lintable.path + while role.parent.name != "roles" and role.name: + role = role.parent + if role.exists() and not role.is_file(): + lintable = Lintable(role, kind="role") + if lintable not in lintables: + _logger.debug("Added role: %s", lintable) + lintables.append(lintable) + + +def convert_to_boolean(value: Any) -> bool: + """Use Ansible to convert something to a boolean.""" + return bool(boolean(value)) + + +def nested_items( + data: dict[Any, Any] | list[Any], parent: str = "" +) -> Generator[tuple[Any, Any, str], None, None]: + """Iterate a nested data structure.""" + warnings.warn( + "Call to deprecated function ansiblelint.utils.nested_items. 
" + "Use ansiblelint.yaml_utils.nested_items_path instead.", + category=DeprecationWarning, + stacklevel=2, + ) + if isinstance(data, dict): + for k, v in data.items(): + yield k, v, parent + # pylint: disable=redefined-outer-name + for k, v, returned_parent in nested_items(v, k): + yield k, v, returned_parent + if isinstance(data, list): + for item in data: + yield "list-item", item, parent + for k, v, returned_parent in nested_items(item): + yield k, v, returned_parent diff --git a/src/ansiblelint/version.py b/src/ansiblelint/version.py new file mode 100644 index 0000000..4967ced --- /dev/null +++ b/src/ansiblelint/version.py @@ -0,0 +1,14 @@ +"""Ansible-lint version information.""" +try: + from ._version import version as __version__ +except ImportError: # pragma: no cover + try: + import pkg_resources + + __version__ = pkg_resources.get_distribution("ansible-lint").version + except Exception: # pylint: disable=broad-except + # this is the fallback SemVer version picked by setuptools_scm when tag + # information is not available. + __version__ = "0.1.dev1" + +__all__ = ("__version__",) diff --git a/src/ansiblelint/yaml_utils.py b/src/ansiblelint/yaml_utils.py new file mode 100644 index 0000000..ca8a2e1 --- /dev/null +++ b/src/ansiblelint/yaml_utils.py @@ -0,0 +1,1146 @@ +"""Utility helpers to simplify working with yaml-based data.""" +# pylint: disable=too-many-lines +from __future__ import annotations + +import functools +import logging +import os +import re +from io import StringIO +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterator, + Pattern, + Sequence, + Tuple, + Union, + cast, +) + +import ruamel.yaml.events +from ruamel.yaml.comments import CommentedMap, CommentedSeq, Format +from ruamel.yaml.constructor import RoundTripConstructor +from ruamel.yaml.emitter import Emitter, ScalarAnalysis + +# Module 'ruamel.yaml' does not explicitly export attribute 'YAML'; implicit reexport disabled +# To make the type checkers happy, we import from ruamel.yaml.main instead. +from ruamel.yaml.main import YAML +from ruamel.yaml.nodes import ScalarNode +from ruamel.yaml.representer import RoundTripRepresenter +from ruamel.yaml.scalarint import ScalarInt +from ruamel.yaml.tokens import CommentToken +from yamllint.config import YamlLintConfig + +from ansiblelint.constants import ( + ANNOTATION_KEYS, + NESTED_TASK_KEYS, + PLAYBOOK_TASK_KEYWORDS, + SKIPPED_RULES_KEY, +) +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.utils import get_action_tasks, normalize_task + +if TYPE_CHECKING: + # noinspection PyProtectedMember + from ruamel.yaml.comments import LineCol # pylint: disable=ungrouped-imports + +_logger = logging.getLogger(__name__) + +YAMLLINT_CONFIG = """ +extends: default +rules: + comments: + # https://github.com/prettier/prettier/issues/6780 + min-spaces-from-content: 1 + # https://github.com/adrienverge/yamllint/issues/384 + comments-indentation: false + document-start: disable + # 160 chars was the default used by old E204 rule, but + # you can easily change it or disable in your .yamllint file. + line-length: + max: 160 + # We are adding an extra space inside braces as that's how prettier does it + # and we are trying not to fight other linters. 
+ braces: + min-spaces-inside: 0 # yamllint defaults to 0 + max-spaces-inside: 1 # yamllint defaults to 0 + octal-values: + forbid-implicit-octal: true # yamllint defaults to false + forbid-explicit-octal: true # yamllint defaults to false +""" + + +def deannotate(data: Any) -> Any: + """Remove our annotations like __file__ and __line__ and return a JSON serializable object.""" + if isinstance(data, dict): + result = data.copy() + for key, value in data.items(): + if key in ANNOTATION_KEYS: + del result[key] + else: + result[key] = deannotate(value) + return result + if isinstance(data, list): + return [deannotate(item) for item in data if item not in ANNOTATION_KEYS] + return data + + +@functools.lru_cache(maxsize=1) +def load_yamllint_config() -> YamlLintConfig: + """Load our default yamllint config and any customized override file.""" + config = YamlLintConfig(content=YAMLLINT_CONFIG) + # if we detect local yamllint config we use it but raise a warning + # as this is likely to get out of sync with our internal config. + for file in [ + ".yamllint", + ".yamllint.yaml", + ".yamllint.yml", + os.getenv("YAMLLINT_CONFIG_FILE", ""), + os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config")) + + "/yamllint/config", + ]: + if os.path.isfile(file): + _logger.debug( + "Loading custom %s config file, this extends our " + "internal yamllint config.", + file, + ) + config_override = YamlLintConfig(file=file) + config_override.extend(config) + config = config_override + break + _logger.debug("Effective yamllint rules used: %s", config.rules) + return config + + +def iter_tasks_in_file( + lintable: Lintable, +) -> Iterator[tuple[dict[str, Any], dict[str, Any], list[str], MatchError | None]]: + """Iterate over tasks in file. + + This yields a 4-tuple of raw_task, normalized_task, skip_tags, and error. + + raw_task: + When looping through the tasks in the file, each "raw_task" is minimally + processed to include these special keys: __line__, __file__, skipped_rules. + normalized_task: + When each raw_task is "normalized", action shorthand (strings) get parsed + by ansible into python objects and the action key gets normalized. If the task + should be skipped (skipped is True) or normalizing it fails (error is not None) + then this is just the raw_task instead of a normalized copy. + skip_tags: + List of tags found to be skipped, from tags block or noqa comments + error: + This is normally None. It will be a MatchError when the raw_task cannot be + normalized due to an AnsibleParserError. 
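+
+    A typical consumer iterates over the tuples and skips entries whose
+    ``error`` is set or whose rule id appears in ``skip_tags``; a minimal
+    sketch (``lintable`` and ``my_rule_id`` are placeholders):
+
+    .. code-block:: python
+
+        for raw_task, task, skip_tags, error in iter_tasks_in_file(lintable):
+            if error is not None or "my_rule_id" in skip_tags:
+                continue
+            # ... inspect the normalized ``task`` here ...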
+ + :param lintable: The playbook or tasks/handlers yaml file to get tasks from + + Yields raw_task, normalized_task, skipped, error + """ + data = lintable.data + if not data: + return + + raw_tasks = get_action_tasks(data, lintable) + + for raw_task in raw_tasks: + err: MatchError | None = None + + skip_tags: list[str] = raw_task.get(SKIPPED_RULES_KEY, []) + + try: + normalized_task = normalize_task(raw_task, str(lintable.path)) + except MatchError as err: + # normalize_task converts AnsibleParserError to MatchError + yield raw_task, raw_task, skip_tags, err + return + + if "skip_ansible_lint" in raw_task.get("tags", []): + skip_tags.append("skip_ansible_lint") + if skip_tags: + yield raw_task, normalized_task, skip_tags, err + continue + + yield raw_task, normalized_task, skip_tags, err + + +def nested_items_path( + data_collection: dict[Any, Any] | list[Any], + ignored_keys: Sequence[str] = (), +) -> Iterator[tuple[Any, Any, list[str | int]]]: + """Iterate a nested data structure, yielding key/index, value, and parent_path. + + This is a recursive function that calls itself for each nested layer of data. + Each iteration yields: + + 1. the current item's dictionary key or list index, + 2. the current item's value, and + 3. the path to the current item from the outermost data structure. + + For dicts, the yielded (1) key and (2) value are what ``dict.items()`` yields. + For lists, the yielded (1) index and (2) value are what ``enumerate()`` yields. + The final component, the parent path, is a list of dict keys and list indexes. + The parent path can be helpful in providing error messages that indicate + precisely which part of a yaml file (or other data structure) needs to be fixed. + + For example, given this playbook: + + .. code-block:: yaml + + - name: A play + tasks: + - name: A task + debug: + msg: foobar + + Here's the first and last yielded items: + + .. code-block:: python + + >>> playbook=[{"name": "a play", "tasks": [{"name": "a task", "debug": {"msg": "foobar"}}]}] + >>> next( nested_items_path( playbook ) ) + (0, {'name': 'a play', 'tasks': [{'name': 'a task', 'debug': {'msg': 'foobar'}}]}, []) + >>> list( nested_items_path( playbook ) )[-1] + ('msg', 'foobar', [0, 'tasks', 0, 'debug']) + + Note that, for outermost data structure, the parent path is ``[]`` because + you do not need to descend into any nested dicts or lists to find the indicated + key and value. + + If a rule were designed to prohibit "foobar" debug messages, it could use the + parent path to provide a path to the problematic ``msg``. It might use a jq-style + path in its error message: "the error is at ``.[0].tasks[0].debug.msg``". + Or if a utility could automatically fix issues, it could use the path to descend + to the parent object using something like this: + + .. code-block:: python + + target = data + for segment in parent_path: + target = target[segment] + + :param data_collection: The nested data (dicts or lists). 
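+    :param ignored_keys: Additional keys to skip while descending into nested
+        dicts; the linter's own annotation keys are always skipped.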
+ + :returns: each iteration yields the key (of the parent dict) or the index (lists) + """ + # As typing and mypy cannot effectively ensure we are called only with + # valid data, we better ignore NoneType + if data_collection is None: + return + yield from _nested_items_path( + data_collection=data_collection, parent_path=[], ignored_keys=ignored_keys + ) + + +def _nested_items_path( + data_collection: dict[Any, Any] | list[Any], + parent_path: list[str | int], + ignored_keys: Sequence[str] = (), +) -> Iterator[tuple[Any, Any, list[str | int]]]: + """Iterate through data_collection (internal implementation of nested_items_path). + + This is a separate function because callers of nested_items_path should + not be using the parent_path param which is used in recursive _nested_items_path + calls to build up the path to the parent object of the current key/index, value. + """ + # we have to cast each convert_to_tuples assignment or mypy complains + # that both assignments (for dict and list) do not have the same type + convert_to_tuples_type = Callable[[], Iterator[Tuple[Union[str, int], Any]]] + if isinstance(data_collection, dict): + convert_data_collection_to_tuples = cast( + convert_to_tuples_type, functools.partial(data_collection.items) + ) + elif isinstance(data_collection, list): + convert_data_collection_to_tuples = cast( + convert_to_tuples_type, functools.partial(enumerate, data_collection) + ) + else: + raise TypeError( + f"Expected a dict or a list but got {data_collection!r} " + f"of type '{type(data_collection)}'" + ) + for key, value in convert_data_collection_to_tuples(): + if key in (SKIPPED_RULES_KEY, "__file__", "__line__", *ignored_keys): + continue + yield key, value, parent_path + if isinstance(value, (dict, list)): + yield from _nested_items_path( + data_collection=value, parent_path=parent_path + [key] + ) + + +def get_path_to_play( + lintable: Lintable, + line_number: int, # 1-based + ruamel_data: CommentedMap | CommentedSeq, +) -> list[str | int]: + """Get the path to the play in the given file at the given line number.""" + if line_number < 1: + raise ValueError(f"expected line_number >= 1, got {line_number}") + if lintable.kind != "playbook" or not isinstance(ruamel_data, CommentedSeq): + return [] + lc: LineCol # lc uses 0-based counts # pylint: disable=invalid-name + # line_number is 1-based. Convert to 0-based. + line_index = line_number - 1 + + prev_play_line_index = ruamel_data.lc.line + last_play_index = len(ruamel_data) + for play_index, play in enumerate(ruamel_data): + next_play_index = play_index + 1 + if last_play_index > next_play_index: + next_play_line_index = ruamel_data[next_play_index].lc.line + else: + next_play_line_index = None + + lc = play.lc # pylint: disable=invalid-name + assert isinstance(lc.line, int) + if lc.line == line_index: + return [play_index] + if play_index > 0 and prev_play_line_index < line_index < lc.line: + return [play_index - 1] + # The previous play check (above) can't catch the last play, + # so, handle the last play separately. 
+ if ( + next_play_index == last_play_index + and line_index > lc.line + and (next_play_line_index is None or line_index < next_play_line_index) + ): + # part of this (last) play + return [play_index] + prev_play_line_index = play.lc.line + return [] + + +def get_path_to_task( + lintable: Lintable, + line_number: int, # 1-based + ruamel_data: CommentedMap | CommentedSeq, +) -> list[str | int]: + """Get the path to the task in the given file at the given line number.""" + if line_number < 1: + raise ValueError(f"expected line_number >= 1, got {line_number}") + if lintable.kind in ("tasks", "handlers"): + assert isinstance(ruamel_data, CommentedSeq) + return _get_path_to_task_in_tasks_block(line_number, ruamel_data) + if lintable.kind == "playbook": + assert isinstance(ruamel_data, CommentedSeq) + return _get_path_to_task_in_playbook(line_number, ruamel_data) + # if lintable.kind in ["yaml", "requirements", "vars", "meta", "reno", "test-meta"]: + + return [] + + +def _get_path_to_task_in_playbook( + line_number: int, # 1-based + ruamel_data: CommentedSeq, +) -> list[str | int]: + """Get the path to the task in the given playbook data at the given line number.""" + last_play_index = len(ruamel_data) + for play_index, play in enumerate(ruamel_data): + next_play_index = play_index + 1 + if last_play_index > next_play_index: + next_play_line_index = ruamel_data[next_play_index].lc.line + else: + next_play_line_index = None + + play_keys = list(play.keys()) + for tasks_keyword in PLAYBOOK_TASK_KEYWORDS: + if not play.get(tasks_keyword): + continue + + try: + next_keyword = play_keys[play_keys.index(tasks_keyword) + 1] + except IndexError: + next_block_line_index = None + else: + next_block_line_index = play.lc.data[next_keyword][0] + # last_line_number_in_block is 1-based; next_*_line_index is 0-based + # next_*_line_index - 1 to get line before next_*_line_index. + # Then + 1 to make it a 1-based number. + # So, last_line_number_in_block = next_*_line_index - 1 + 1 + if next_block_line_index is not None: + last_line_number_in_block = next_block_line_index + elif next_play_line_index is not None: + last_line_number_in_block = next_play_line_index + else: + last_line_number_in_block = None + + task_path = _get_path_to_task_in_tasks_block( + line_number, play[tasks_keyword], last_line_number_in_block + ) + if task_path: + # mypy gets confused without this typehint + tasks_keyword_path: list[int | str] = [ + play_index, + tasks_keyword, + ] + return tasks_keyword_path + list(task_path) + # line_number is before first play or no tasks keywords in any of the plays + return [] + + +def _get_path_to_task_in_tasks_block( + line_number: int, # 1-based + tasks_block: CommentedSeq, + last_line_number: int | None = None, # 1-based +) -> list[str | int]: + """Get the path to the task in the given tasks block at the given line number.""" + task: CommentedMap | None + # line_number and last_line_number are 1-based. Convert to 0-based. 
+ line_index = line_number - 1 + last_line_index = None if last_line_number is None else last_line_number - 1 + + # lc (LineCol) uses 0-based counts + prev_task_line_index = tasks_block.lc.line + last_task_index = len(tasks_block) + for task_index, task in enumerate(tasks_block): + next_task_index = task_index + 1 + if last_task_index > next_task_index: + if tasks_block[next_task_index] is not None: + next_task_line_index = tasks_block[next_task_index].lc.line + else: + next_task_line_index = tasks_block.lc.item(next_task_index)[0] + else: + next_task_line_index = None + + if task is None: + # create a dummy task to represent the null task + task = CommentedMap() + task.lc.line, task.lc.col = tasks_block.lc.item(task_index) + + nested_task_keys = set(task.keys()).intersection(set(NESTED_TASK_KEYS)) + if nested_task_keys: + subtask_path = _get_path_to_task_in_nested_tasks_block( + line_number, task, nested_task_keys, next_task_line_index + ) + if subtask_path: + # mypy gets confused without this typehint + task_path: list[str | int] = [task_index] + return task_path + list(subtask_path) + + assert isinstance(task.lc.line, int) + if task.lc.line == line_index: + return [task_index] + if task_index > 0 and prev_task_line_index < line_index < task.lc.line: + return [task_index - 1] + # The previous task check can't catch the last task, + # so, handle the last task separately (also after subtask checks). + # pylint: disable=too-many-boolean-expressions + if ( + next_task_index == last_task_index + and line_index > task.lc.line + and (next_task_line_index is None or line_index < next_task_line_index) + and (last_line_index is None or line_index <= last_line_index) + ): + # part of this (last) task + return [task_index] + prev_task_line_index = task.lc.line + # line is not part of this tasks block + return [] + + +def _get_path_to_task_in_nested_tasks_block( + line_number: int, # 1-based + task: CommentedMap, + nested_task_keys: set[str], + next_task_line_index: int | None = None, # 0-based +) -> list[str | int]: + """Get the path to the task in the given nested tasks block.""" + # loop through the keys in line order + task_keys = list(task.keys()) + task_keys_by_index = dict(enumerate(task_keys)) + for task_index, task_key in enumerate(task_keys): + nested_task_block = task[task_key] + if task_key not in nested_task_keys or not nested_task_block: + continue + next_task_key = task_keys_by_index.get(task_index + 1, None) + if next_task_key is not None: + next_task_key_line_index = task.lc.data[next_task_key][0] + else: + next_task_key_line_index = None + # last_line_number_in_block is 1-based; next_*_line_index is 0-based + # next_*_line_index - 1 to get line before next_*_line_index. + # Then + 1 to make it a 1-based number. 
+ # So, last_line_number_in_block = next_*_line_index - 1 + 1 + last_line_number_in_block = ( + next_task_key_line_index + if next_task_key_line_index is not None + else next_task_line_index + ) + subtask_path = _get_path_to_task_in_tasks_block( + line_number, + nested_task_block, + last_line_number_in_block, # 1-based + ) + if subtask_path: + return [task_key] + list(subtask_path) + # line is not part of this nested tasks block + return [] + + +class OctalIntYAML11(ScalarInt): + """OctalInt representation for YAML 1.1.""" + + # tell mypy that ScalarInt has these attributes + _width: Any + _underscore: Any + + def __new__(cls, *args: Any, **kwargs: Any) -> Any: + """Create a new int with ScalarInt-defined attributes.""" + return ScalarInt.__new__(cls, *args, **kwargs) + + @staticmethod + def represent_octal(representer: RoundTripRepresenter, data: OctalIntYAML11) -> Any: + """Return a YAML 1.1 octal representation. + + Based on ruamel.yaml.representer.RoundTripRepresenter.represent_octal_int() + (which only handles the YAML 1.2 octal representation). + """ + v = format(data, "o") + anchor = data.yaml_anchor(any=True) + # noinspection PyProtectedMember + # pylint: disable=protected-access + return representer.insert_underscore("0", v, data._underscore, anchor=anchor) + + +class CustomConstructor(RoundTripConstructor): + """Custom YAML constructor that preserves Octal formatting in YAML 1.1.""" + + def construct_yaml_int(self, node: ScalarNode) -> Any: + """Construct int while preserving Octal formatting in YAML 1.1. + + ruamel.yaml only preserves the octal format for YAML 1.2. + For 1.1, it converts the octal to an int. So, we preserve the format. + + Code partially copied from ruamel.yaml (MIT licensed). + """ + ret = super().construct_yaml_int(node) + if self.resolver.processing_version == (1, 1) and isinstance(ret, int): + # Do not rewrite zero as octal. + if ret == 0: + return ret + # see if we've got an octal we need to preserve. + value_su = self.construct_scalar(node) + try: + v = value_su.rstrip("_") + underscore = [len(v) - v.rindex("_") - 1, False, False] # type: Any + except ValueError: + underscore = None + except IndexError: + underscore = None + value_s = value_su.replace("_", "") + if value_s[0] in "+-": + value_s = value_s[1:] + if value_s[0] == "0": + # got an octal in YAML 1.1 + ret = OctalIntYAML11( + ret, width=None, underscore=underscore, anchor=node.anchor + ) + return ret + + +CustomConstructor.add_constructor( + "tag:yaml.org,2002:int", CustomConstructor.construct_yaml_int +) + + +class FormattedEmitter(Emitter): + """Emitter that applies custom formatting rules when dumping YAML. + + Differences from ruamel.yaml defaults: + + - indentation of root-level sequences + - prefer double-quoted scalars over single-quoted scalars + + This ensures that root-level sequences are never indented. + All subsequent levels are indented as configured (normal ruamel.yaml behavior). + + Earlier implementations used dedent on ruamel.yaml's dumped output, + but string magic like that had a ton of problematic edge cases. 
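+
+    The quoting preference and the brace-spacing limits are plain class
+    attributes so that ``FormattedYAML`` can overwrite them from the effective
+    yamllint configuration before dumping.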
+ """ + + preferred_quote = '"' # either " or ' + + min_spaces_inside = 0 + max_spaces_inside = 1 + + _sequence_indent = 2 + _sequence_dash_offset = 0 # Should be _sequence_indent - 2 + _root_is_sequence = False + + _in_empty_flow_map = False + + @property + def _is_root_level_sequence(self) -> bool: + """Return True if this is a sequence at the root level of the yaml document.""" + return self.column < 2 and self._root_is_sequence + + def expect_document_root(self) -> None: + """Expect doc root (extend to record if the root doc is a sequence).""" + self._root_is_sequence = isinstance( + self.event, ruamel.yaml.events.SequenceStartEvent + ) + return super().expect_document_root() + + # NB: mypy does not support overriding attributes with properties yet: + # https://github.com/python/mypy/issues/4125 + # To silence we have to ignore[override] both the @property and the method. + + @property + def best_sequence_indent(self) -> int: + """Return the configured sequence_indent or 2 for root level.""" + return 2 if self._is_root_level_sequence else self._sequence_indent + + @best_sequence_indent.setter + def best_sequence_indent(self, value: int) -> None: + """Configure how many columns to indent each sequence item (including the '-').""" + self._sequence_indent = value + + @property + def sequence_dash_offset(self) -> int: + """Return the configured sequence_dash_offset or 0 for root level.""" + return 0 if self._is_root_level_sequence else self._sequence_dash_offset + + @sequence_dash_offset.setter + def sequence_dash_offset(self, value: int) -> None: + """Configure how many spaces to put before each sequence item's '-'.""" + self._sequence_dash_offset = value + + def choose_scalar_style(self) -> Any: + """Select how to quote scalars if needed.""" + style = super().choose_scalar_style() + if ( + style == "" + and self.event.value.startswith("0") + and len(self.event.value) > 1 + ): + if self.event.tag == "tag:yaml.org,2002:int" and self.event.implicit[0]: + # ensures that "0123" string does not lose its quoting + self.event.tag = "tag:yaml.org,2002:str" + self.event.implicit = (True, True, True) + return '"' + if style != "'": + # block scalar, double quoted, etc. + return style + if '"' in self.event.value: + return "'" + return self.preferred_quote + + def write_indicator( + self, + indicator: str, # ruamel.yaml typehint is wrong. This is a string. + need_whitespace: bool, + whitespace: bool = False, + indention: bool = False, # (sic) ruamel.yaml has this typo in their API + ) -> None: + """Make sure that flow maps get whitespace by the curly braces.""" + # We try to go with one whitespace by the curly braces and adjust accordingly + # to what min_spaces_inside and max_spaces_inside are set to. + # This assumes min_spaces_inside <= max_spaces_inside + spaces_inside = min( + max(1, self.min_spaces_inside), + self.max_spaces_inside if self.max_spaces_inside != -1 else 1, + ) + # If this is the end of the flow mapping that isn't on a new line: + if ( + indicator == "}" + and (self.column or 0) > (self.indent or 0) + and not self._in_empty_flow_map + ): + indicator = (" " * spaces_inside) + "}" + super().write_indicator(indicator, need_whitespace, whitespace, indention) + # if it is the start of a flow mapping, and it's not time + # to wrap the lines, insert a space. 
+ if indicator == "{" and self.column < self.best_width: + if self.check_empty_mapping(): + self._in_empty_flow_map = True + else: + self.column += 1 + self.stream.write(" " * spaces_inside) + self._in_empty_flow_map = False + + # "/n/n" results in one blank line (end the previous line, then newline). + # So, "/n/n/n" or more is too many new lines. Clean it up. + _re_repeat_blank_lines: Pattern[str] = re.compile(r"\n{3,}") + + @staticmethod + def add_octothorpe_protection(string: str) -> str: + """Modify strings to protect "#" from full-line-comment post-processing.""" + try: + if "#" in string: + # # is \uFF03 (fullwidth number sign) + # ﹟ is \uFE5F (small number sign) + string = string.replace("#", "\uFF03#\uFE5F") + # this is safe even if this sequence is present + # because it gets reversed in post-processing + except (ValueError, TypeError): + # probably not really a string. Whatever. + pass + return string + + @staticmethod + def drop_octothorpe_protection(string: str) -> str: + """Remove string protection of "#" after full-line-comment post-processing.""" + try: + if "\uFF03#\uFE5F" in string: + # # is \uFF03 (fullwidth number sign) + # ﹟ is \uFE5F (small number sign) + string = string.replace("\uFF03#\uFE5F", "#") + except (ValueError, TypeError): + # probably not really a string. Whatever. + pass + return string + + def analyze_scalar(self, scalar: str) -> ScalarAnalysis: + """Determine quoting and other requirements for string. + + And protect "#" from full-line-comment post-processing. + """ + analysis: ScalarAnalysis = super().analyze_scalar(scalar) + if analysis.empty: + return analysis + analysis.scalar = self.add_octothorpe_protection(analysis.scalar) + return analysis + + # comment is a CommentToken, not Any (Any is ruamel.yaml's lazy type hint). + def write_comment(self, comment: CommentToken, pre: bool = False) -> None: + """Clean up extra new lines and spaces in comments. + + ruamel.yaml treats new or empty lines as comments. + See: https://stackoverflow.com/questions/42708668/removing-all-blank-lines-but-not-comments-in-ruamel-yaml/42712747#42712747 + """ + value: str = comment.value + if ( + pre + and not value.strip() + and not isinstance( + self.event, + ( + ruamel.yaml.events.CollectionEndEvent, + ruamel.yaml.events.DocumentEndEvent, + ruamel.yaml.events.StreamEndEvent, + ), + ) + ): + # drop pure whitespace pre comments + # does not apply to End events since they consume one of the newlines. + value = "" + elif pre: + # preserve content in pre comment with at least one newline, + # but no extra blank lines. + value = self._re_repeat_blank_lines.sub("\n", value) + else: + # single blank lines in post comments + value = self._re_repeat_blank_lines.sub("\n\n", value) + comment.value = value + + # make sure that the eol comment only has one space before it. + if comment.column > self.column + 1 and not pre: + comment.column = self.column + 1 + + return super().write_comment(comment, pre) + + def write_version_directive(self, version_text: Any) -> None: + """Skip writing '%YAML 1.1'.""" + if version_text == "1.1": + return + super().write_version_directive(version_text) + + +# pylint: disable=too-many-instance-attributes +class FormattedYAML(YAML): + """A YAML loader/dumper that handles ansible content better by default.""" + + def __init__( + self, + *, + typ: str | None = None, + pure: bool = False, + output: Any = None, + # input: Any = None, + plug_ins: list[str] | None = None, + ): + """Return a configured ``ruamel.yaml.YAML`` instance. 
+ + Some config defaults get extracted from the yamllint config. + + ``ruamel.yaml.YAML`` uses attributes to configure how it dumps yaml files. + Some of these settings can be confusing, so here are examples of how different + settings will affect the dumped yaml. + + This example does not indent any sequences: + + .. code:: python + + yaml.explicit_start=True + yaml.map_indent=2 + yaml.sequence_indent=2 + yaml.sequence_dash_offset=0 + + .. code:: yaml + + --- + - name: A playbook + tasks: + - name: Task + + This example indents all sequences including the root-level: + + .. code:: python + + yaml.explicit_start=True + yaml.map_indent=2 + yaml.sequence_indent=4 + yaml.sequence_dash_offset=2 + # yaml.Emitter defaults to ruamel.yaml.emitter.Emitter + + .. code:: yaml + + --- + - name: Playbook + tasks: + - name: Task + + This example indents all sequences except at the root-level: + + .. code:: python + + yaml.explicit_start=True + yaml.map_indent=2 + yaml.sequence_indent=4 + yaml.sequence_dash_offset=2 + yaml.Emitter = FormattedEmitter # custom Emitter prevents root-level indents + + .. code:: yaml + + --- + - name: Playbook + tasks: + - name: Task + """ + # Default to reading/dumping YAML 1.1 (ruamel.yaml defaults to 1.2) + self._yaml_version_default: tuple[int, int] = (1, 1) + self._yaml_version: str | tuple[int, int] = self._yaml_version_default + + super().__init__(typ=typ, pure=pure, output=output, plug_ins=plug_ins) + + # NB: We ignore some mypy issues because ruamel.yaml typehints are not great. + + config = self._defaults_from_yamllint_config() + + # these settings are derived from yamllint config + self.explicit_start: bool = config["explicit_start"] # type: ignore[assignment] + self.explicit_end: bool = config["explicit_end"] # type: ignore[assignment] + self.width: int = config["width"] # type: ignore[assignment] + indent_sequences: bool = cast(bool, config["indent_sequences"]) + preferred_quote: str = cast(str, config["preferred_quote"]) # either ' or " + + min_spaces_inside: int = cast(int, config["min_spaces_inside"]) + max_spaces_inside: int = cast(int, config["max_spaces_inside"]) + + self.default_flow_style = False + self.compact_seq_seq = True # type: ignore[assignment] # dash after dash + self.compact_seq_map = True # type: ignore[assignment] # key after dash + + # Do not use yaml.indent() as it obscures the purpose of these vars: + self.map_indent = 2 # type: ignore[assignment] + self.sequence_indent = 4 if indent_sequences else 2 # type: ignore[assignment] + self.sequence_dash_offset = self.sequence_indent - 2 # type: ignore[operator] + + # If someone doesn't want our FormattedEmitter, they can change it. + self.Emitter = FormattedEmitter + + # ignore invalid preferred_quote setting + if preferred_quote in ['"', "'"]: + FormattedEmitter.preferred_quote = preferred_quote + # NB: default_style affects preferred_quote as well. + # self.default_style ∈ None (default), '', '"', "'", '|', '>' + + # spaces inside braces for flow mappings + FormattedEmitter.min_spaces_inside = min_spaces_inside + FormattedEmitter.max_spaces_inside = max_spaces_inside + + # We need a custom constructor to preserve Octal formatting in YAML 1.1 + self.Constructor = CustomConstructor + self.Representer.add_representer(OctalIntYAML11, OctalIntYAML11.represent_octal) + + # We should preserve_quotes loads all strings as a str subclass that carries + # a quote attribute. Will the str subclasses cause problems in transforms? + # Are there any other gotchas to this? 
+ # + # This will only preserve quotes for strings read from the file. + # anything modified by the transform will use no quotes, preferred_quote, + # or the quote that results in the least amount of escaping. + # self.preserve_quotes = True + + # If needed, we can use this to change null representation to be explicit + # (see https://stackoverflow.com/a/44314840/1134951) + # self.Representer.add_representer( + # type(None), + # lambda self, data: self.represent_scalar("tag:yaml.org,2002:null", "null"), + # ) + + @staticmethod + def _defaults_from_yamllint_config() -> dict[str, bool | int | str]: + """Extract FormattedYAML-relevant settings from yamllint config if possible.""" + config = { + "explicit_start": True, + "explicit_end": False, + "width": 160, + "indent_sequences": True, + "preferred_quote": '"', + "min_spaces_inside": 0, + "max_spaces_inside": 1, + } + for rule, rule_config in load_yamllint_config().rules.items(): + if not rule_config: + # rule disabled + continue + + # refactor this if ... elif ... elif ... else monstrosity using match/case (PEP 634) once python 3.10 is mandatory + if rule == "document-start": + config["explicit_start"] = rule_config["present"] + elif rule == "document-end": + config["explicit_end"] = rule_config["present"] + elif rule == "line-length": + config["width"] = rule_config["max"] + elif rule == "braces": + min_spaces_inside = rule_config["min-spaces-inside"] + if min_spaces_inside: + config["min_spaces_inside"] = int(min_spaces_inside) + max_spaces_inside = rule_config["max-spaces-inside"] + if max_spaces_inside: + config["max_spaces_inside"] = int(max_spaces_inside) + elif rule == "indentation": + indent_sequences = rule_config["indent-sequences"] + # one of: bool, "whatever", "consistent" + # so, we use True for "whatever" and "consistent" + config["indent_sequences"] = bool(indent_sequences) + elif rule == "quoted-strings": + quote_type = rule_config["quote-type"] + # one of: single, double, any + if quote_type == "single": + config["preferred_quote"] = "'" + elif quote_type == "double": + config["preferred_quote"] = '"' + + return cast(Dict[str, Union[bool, int, str]], config) + + @property # type: ignore[override] + def version(self) -> str | tuple[int, int]: + """Return the YAML version used to parse or dump. + + Ansible uses PyYAML which only supports YAML 1.1. ruamel.yaml defaults to 1.2. + So, we have to make sure we dump yaml files using YAML 1.1. + We can relax the version requirement once ansible uses a version of PyYAML + that includes this PR: https://github.com/yaml/pyyaml/pull/555 + """ + return self._yaml_version + + @version.setter + def version(self, value: str | tuple[int, int] | None) -> None: + """Ensure that yaml version uses our default value. + + The yaml Reader updates this value based on the ``%YAML`` directive in files. + So, if a file does not include the directive, it sets this to None. + But, None effectively resets the parsing version to YAML 1.2 (ruamel's default). 
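+        Falling back to our 1.1 default instead keeps round-tripped files
+        loadable by Ansible's PyYAML-based parser.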
+ """ + self._yaml_version = value if value is not None else self._yaml_version_default + + def loads(self, stream: str) -> Any: + """Load YAML content from a string while avoiding known ruamel.yaml issues.""" + if not isinstance(stream, str): + raise NotImplementedError(f"expected a str but got {type(stream)}") + text, preamble_comment = self._pre_process_yaml(stream) + data = self.load(stream=text) + if preamble_comment is not None: + setattr(data, "preamble_comment", preamble_comment) + return data + + def dumps(self, data: Any) -> str: + """Dump YAML document to string (including its preamble_comment).""" + preamble_comment: str | None = getattr(data, "preamble_comment", None) + self._prevent_wrapping_flow_style(data) + with StringIO() as stream: + if preamble_comment: + stream.write(preamble_comment) + self.dump(data, stream) + text = stream.getvalue() + return self._post_process_yaml(text) + + def _prevent_wrapping_flow_style(self, data: Any) -> None: + if not isinstance(data, (CommentedMap, CommentedSeq)): + return + for key, value, parent_path in nested_items_path(data): + if not isinstance(value, (CommentedMap, CommentedSeq)): + continue + fa: Format = value.fa # pylint: disable=invalid-name + if fa.flow_style(): + predicted_indent = self._predict_indent_length(parent_path, key) + predicted_width = len(str(value)) + if predicted_indent + predicted_width > self.width: + # this flow-style map will probably get line-wrapped, + # so, switch it to block style to avoid the line wrap. + fa.set_block_style() + + def _predict_indent_length(self, parent_path: list[str | int], key: Any) -> int: + indent = 0 + + # each parent_key type tells us what the indent is for the next level. + for parent_key in parent_path: + if isinstance(parent_key, int) and indent == 0: + # root level is a sequence + indent += self.sequence_dash_offset + elif isinstance(parent_key, int): + # next level is a sequence + indent += cast(int, self.sequence_indent) + elif isinstance(parent_key, str): + # next level is a map + indent += cast(int, self.map_indent) + + if isinstance(key, int) and indent == 0: + # flow map is an item in a root-level sequence + indent += self.sequence_dash_offset + elif isinstance(key, int) and indent > 0: + # flow map is in a sequence + indent += cast(int, self.sequence_indent) + elif isinstance(key, str): + # flow map is in a map + indent += len(key + ": ") + + return indent + + # ruamel.yaml only preserves empty (no whitespace) blank lines + # (ie "/n/n" becomes "/n/n" but "/n /n" becomes "/n"). + # So, we need to identify whitespace-only lines to drop spaces before reading. + _whitespace_only_lines_re = re.compile(r"^ +$", re.MULTILINE) + + def _pre_process_yaml(self, text: str) -> tuple[str, str | None]: + """Handle known issues with ruamel.yaml loading. + + Preserve blank lines despite extra whitespace. + Preserve any preamble (aka header) comments before "---". + + For more on preamble comments, see: https://stackoverflow.com/questions/70286108/python-ruamel-yaml-package-how-to-get-header-comment-lines/70287507#70287507 + """ + text = self._whitespace_only_lines_re.sub("", text) + + # I investigated extending ruamel.yaml to capture preamble comments. 
+ # preamble comment goes from: + # DocumentStartToken.comment -> DocumentStartEvent.comment + # Then, in the composer: + # once in composer.current_event + # composer.compose_document() + # discards DocumentStartEvent + # move DocumentStartEvent to composer.last_event + # node = composer.compose_node(None, None) + # all document nodes get composed (events get used) + # discard DocumentEndEvent + # move DocumentEndEvent to composer.last_event + # return node + # So, there's no convenient way to extend the composer + # to somehow capture the comments and pass them on. + + preamble_comments = [] + if "\n---\n" not in text and "\n--- " not in text: + # nothing is before the document start mark, + # so there are no comments to preserve. + return text, None + for line in text.splitlines(True): + # We only need to capture the preamble comments. No need to remove them. + # lines might also include directives. + if line.lstrip().startswith("#") or line == "\n": + preamble_comments.append(line) + elif line.startswith("---"): + break + + return text, "".join(preamble_comments) or None + + @staticmethod + def _post_process_yaml(text: str) -> str: + """Handle known issues with ruamel.yaml dumping. + + Make sure there's only one newline at the end of the file. + + Fix the indent of full-line comments to match the indent of the next line. + See: https://stackoverflow.com/questions/71354698/how-can-i-use-the-ruamel-yaml-rtsc-mode/71355688#71355688 + Also, removes "#" protection from strings that prevents them from being + identified as full line comments in post-processing. + + Make sure null list items don't end in a space. + """ + text = text.rstrip("\n") + "\n" + + lines = text.splitlines(keepends=True) + full_line_comments: list[tuple[int, str]] = [] + for i, line in enumerate(lines): + stripped = line.lstrip() + if not stripped: + # blank line. Move on. + continue + + space_length = len(line) - len(stripped) + + if stripped.startswith("#"): + # got a full line comment + + # allow some full line comments to match the previous indent + if i > 0 and not full_line_comments and space_length: + prev = lines[i - 1] + prev_space_length = len(prev) - len(prev.lstrip()) + if prev_space_length == space_length: + # if the indent matches the previous line's indent, skip it. + continue + + full_line_comments.append((i, stripped)) + elif full_line_comments: + # end of full line comments so adjust to match indent of this line + spaces = " " * space_length + for index, comment in full_line_comments: + lines[index] = spaces + comment + full_line_comments.clear() + + cleaned = line.strip() + if not cleaned.startswith("#") and cleaned.endswith("-"): + # got an empty list item. drop any trailing spaces. + lines[i] = line.rstrip() + "\n" + + text = "".join( + FormattedEmitter.drop_octothorpe_protection(line) for line in lines + ) + return text + + +def clean_json( + obj: Any, + func: Callable[[str], Any] = lambda key: key.startswith("__") + if isinstance(key, str) + else False, +) -> Any: + """ + Remove all keys matching the condition from a nested JSON-like object. + + :param obj: a JSON like object to clean, also returned for chaining. 
+ :param func: a callable that takes a key in argument and return True for each key to delete + """ + if isinstance(obj, dict): + for key in list(obj.keys()): + if func(key): + del obj[key] + else: + clean_json(obj[key], func) + elif isinstance(obj, list): + for i in reversed(range(len(obj))): + if func(obj[i]): + del obj[i] + else: + clean_json(obj[i], func) + else: + # neither a dict nor a list, do nothing + pass + return obj diff --git a/test/__init__.py b/test/__init__.py new file mode 100644 index 0000000..2cd01f0 --- /dev/null +++ b/test/__init__.py @@ -0,0 +1 @@ +"""Use ansiblelint.testing instead for reusable tests.""" diff --git a/test/bar.txt b/test/bar.txt new file mode 100644 index 0000000..e22f90b --- /dev/null +++ b/test/bar.txt @@ -0,0 +1 @@ +Bar file diff --git a/test/conftest.py b/test/conftest.py new file mode 100644 index 0000000..f31967d --- /dev/null +++ b/test/conftest.py @@ -0,0 +1,106 @@ +"""PyTest fixtures for testing the project.""" +from __future__ import annotations + +import os +import shutil +import subprocess +from contextlib import contextmanager +from pathlib import Path +from typing import TYPE_CHECKING, Iterator + +import pytest + +# pylint: disable=wildcard-import,unused-wildcard-import +from ansiblelint.testing.fixtures import * # noqa: F403 +from ansiblelint.yaml_utils import FormattedYAML + +if TYPE_CHECKING: + from typing import List # pylint: disable=ungrouped-imports + + from _pytest import nodes + from _pytest.config import Config + from _pytest.config.argparsing import Parser + + +@contextmanager +def cwd(path: str) -> Iterator[None]: + """Context manager for chdir.""" + old_pwd = os.getcwd() + os.chdir(path) + try: + yield + finally: + os.chdir(old_pwd) + + +def pytest_addoption(parser: Parser) -> None: + """Add --regenerate-formatting-fixtures option to pytest.""" + parser.addoption( + "--regenerate-formatting-fixtures", + action="store_true", + default=False, + help="Regenerate formatting fixtures with prettier and internal formatter", + ) + + +def pytest_collection_modifyitems(items: list[nodes.Item], config: Config) -> None: + """Skip tests based on --regenerate-formatting-fixtures option.""" + do_regenerate = config.getoption("--regenerate-formatting-fixtures") + skip_other = pytest.mark.skip( + reason="not a formatting_fixture test and " + "--regenerate-formatting-fixtures was specified" + ) + skip_formatting_fixture = pytest.mark.skip( + reason="specify --regenerate-formatting-fixtures to " + "only run formatting_fixtures test" + ) + for item in items: + if do_regenerate and "formatting_fixtures" not in item.keywords: + item.add_marker(skip_other) + elif not do_regenerate and "formatting_fixtures" in item.keywords: + item.add_marker(skip_formatting_fixture) + + +def pytest_configure(config: Config) -> None: + """Register custom markers.""" + if config.getoption("--regenerate-formatting-fixtures"): + regenerate_formatting_fixtures() + + +def regenerate_formatting_fixtures() -> None: + """Re-generate formatting fixtures with prettier and internal formatter. + + Pass ``--regenerate-formatting-fixtures`` to run this and skip all other tests. + This is a "test" because once fixtures are regenerated, + we run prettier again to make sure it does not change files formatted + with our internal formatting code. 
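+ Requires the prettier CLI on PATH (checked below via "which prettier"); typically run as: pytest --regenerate-formatting-fixtures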
+ """ + subprocess.check_call(["which", "prettier"]) + + yaml = FormattedYAML() + + fixtures_dir = Path("test/fixtures/") + fixtures_dir_before = fixtures_dir / "formatting-before" + fixtures_dir_prettier = fixtures_dir / "formatting-prettier" + fixtures_dir_after = fixtures_dir / "formatting-after" + + fixtures_dir_prettier.mkdir(exist_ok=True) + fixtures_dir_after.mkdir(exist_ok=True) + + # Copying before fixtures... + for fixture in fixtures_dir_before.glob("fmt-[0-9].yml"): + shutil.copy(str(fixture), str(fixtures_dir_prettier / fixture.name)) + shutil.copy(str(fixture), str(fixtures_dir_after / fixture.name)) + + # Writing fixtures with prettier... + subprocess.check_call(["prettier", "-w", str(fixtures_dir_prettier)]) + # NB: pre-commit end-of-file-fixer can also modify files. + + # Writing fixtures with ansiblelint.yaml_utils.FormattedYAML() + for fixture in fixtures_dir_after.glob("fmt-[0-9].yml"): + data = yaml.loads(fixture.read_text()) + output = yaml.dumps(data) + fixture.write_text(output) + + # Make sure prettier won't make changes in {fixtures_dir_after} + subprocess.check_call(["prettier", "-c", str(fixtures_dir_after)]) diff --git a/test/custom_rules/__init__.py b/test/custom_rules/__init__.py new file mode 100644 index 0000000..09a0f04 --- /dev/null +++ b/test/custom_rules/__init__.py @@ -0,0 +1 @@ +"""Dummy test module.""" diff --git a/test/custom_rules/example_com/__init__.py b/test/custom_rules/example_com/__init__.py new file mode 100644 index 0000000..a633c75 --- /dev/null +++ b/test/custom_rules/example_com/__init__.py @@ -0,0 +1 @@ +"""A dummy test module #2.""" diff --git a/test/custom_rules/example_com/example_com_rule.py b/test/custom_rules/example_com/example_com_rule.py new file mode 100644 index 0000000..abcb9dd --- /dev/null +++ b/test/custom_rules/example_com/example_com_rule.py @@ -0,0 +1,28 @@ +# Copyright (c) 2020, Ansible Project +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+"""A dummy custom rule module #2.""" + +from ansiblelint.rules import AnsibleLintRule + + +class ExampleComRule(AnsibleLintRule): + """A dummy custom rule class.""" + + id = "100002" diff --git a/test/custom_rules/example_inc/__init__.py b/test/custom_rules/example_inc/__init__.py new file mode 100644 index 0000000..09a0f04 --- /dev/null +++ b/test/custom_rules/example_inc/__init__.py @@ -0,0 +1 @@ +"""Dummy test module.""" diff --git a/test/custom_rules/example_inc/custom_rule.py b/test/custom_rules/example_inc/custom_rule.py new file mode 100644 index 0000000..15c389f --- /dev/null +++ b/test/custom_rules/example_inc/custom_rule.py @@ -0,0 +1,28 @@ +# Copyright (c) 2020, Ansible Project +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +"""Dummy custom rule module.""" + +from ansiblelint.rules import AnsibleLintRule + + +class CustomRule(AnsibleLintRule): + """Dummy custom rule class.""" + + id = "100001" diff --git a/test/fixtures/__init__.py b/test/fixtures/__init__.py new file mode 100644 index 0000000..6fc2115 --- /dev/null +++ b/test/fixtures/__init__.py @@ -0,0 +1 @@ +"""Fixtures used in tests.""" diff --git a/test/fixtures/ansible-config-invalid.yml b/test/fixtures/ansible-config-invalid.yml new file mode 100644 index 0000000..9eb8fe7 --- /dev/null +++ b/test/fixtures/ansible-config-invalid.yml @@ -0,0 +1,4 @@ +--- +# invalid .ansible-lint config file +- foo +- bar diff --git a/test/fixtures/ansible-config.yml b/test/fixtures/ansible-config.yml new file mode 100644 index 0000000..673d6f1 --- /dev/null +++ b/test/fixtures/ansible-config.yml @@ -0,0 +1,3 @@ +--- +verbosity: 1 +# vim: et:sw=2:syntax=yaml:ts=2: diff --git a/test/fixtures/config-with-extra-vars.yml b/test/fixtures/config-with-extra-vars.yml new file mode 100644 index 0000000..5d06b6a --- /dev/null +++ b/test/fixtures/config-with-extra-vars.yml @@ -0,0 +1,4 @@ +--- +extra_vars: + foo: bar + knights_favorite_word: NI diff --git a/test/fixtures/config-with-relative-path.yml b/test/fixtures/config-with-relative-path.yml new file mode 100644 index 0000000..f396347 --- /dev/null +++ b/test/fixtures/config-with-relative-path.yml @@ -0,0 +1,4 @@ +--- +exclude_paths: + - ../../examples/roles/test-role/ +# vim: et:sw=2:syntax=yaml:ts=2: diff --git a/test/fixtures/config-with-write-all.yml b/test/fixtures/config-with-write-all.yml new file mode 100644 index 0000000..a4242c5 --- /dev/null +++ b/test/fixtures/config-with-write-all.yml @@ -0,0 +1,3 @@ +--- +write_list: + - all diff --git 
a/test/fixtures/config-with-write-none.yml b/test/fixtures/config-with-write-none.yml new file mode 100644 index 0000000..5dacd38 --- /dev/null +++ b/test/fixtures/config-with-write-none.yml @@ -0,0 +1,3 @@ +--- +write_list: + - none diff --git a/test/fixtures/config-with-write-subset.yml b/test/fixtures/config-with-write-subset.yml new file mode 100644 index 0000000..f83149d --- /dev/null +++ b/test/fixtures/config-with-write-subset.yml @@ -0,0 +1,4 @@ +--- +write_list: + - rule-tag + - rule-id diff --git a/test/fixtures/exclude-paths-with-expands.yml b/test/fixtures/exclude-paths-with-expands.yml new file mode 100644 index 0000000..640563c --- /dev/null +++ b/test/fixtures/exclude-paths-with-expands.yml @@ -0,0 +1,5 @@ +--- +exclude_paths: + - ~/.ansible/roles + - $HOME/.ansible/roles +# vim: et:sw=2:syntax=yaml:ts=2: diff --git a/test/fixtures/exclude-paths.yml b/test/fixtures/exclude-paths.yml new file mode 100644 index 0000000..6af079e --- /dev/null +++ b/test/fixtures/exclude-paths.yml @@ -0,0 +1,4 @@ +--- +exclude_paths: + - ../ +# vim: et:sw=2:syntax=yaml:ts=2: diff --git a/test/fixtures/formatting-after/fmt-1.yml b/test/fixtures/formatting-after/fmt-1.yml new file mode 100644 index 0000000..118a087 --- /dev/null +++ b/test/fixtures/formatting-after/fmt-1.yml @@ -0,0 +1,47 @@ +--- +# ^ too many newlines before +foo: bar # This is a comment has extra spaces preceding it + +fruits: # unindented sequence: + - apple + - orange +vegetables: # indented sequence: + - onion + - carrot + +quoting: + - that should have double quotes + - that should remain in single quotes + - a string with " inside + # next line has some undesired trailing spaces: + - a string with ' inside + - can't be sure! + # next line should be converted to use double quotes: + - [foo, bar] + +inline-dictionary: + - { foo: bar } # should add some spacing between curly braces and content + - { foo2: bar2 } # should reduce spacing between curly braces and content + +# YAML 1.1 Boolean-hell: https://yaml.org/type/bool.html +booleans-true: + preferred: true # YAML 1.2 compatible! + answer-1.1: true + canonical-1.1: true + canonical-upper-1.1: true + logical-1.1: true + option-1.1: true +booleans-false: + preferred: false # YAML 1.2 compatible! + answer-1.1: false + canonical-1.1: false + canonical-upper-1.1: false + logical-1.1: false + option-1.1: false + +# ^ double newline should be removed +overly-indented-vault-value: !vault | + $ANSIBLE_VAULT;1.1;AES256 + 123466303630313 + +# this file also has 3 newlines at end-of-file instead of one diff --git a/test/fixtures/formatting-after/fmt-2.yml b/test/fixtures/formatting-after/fmt-2.yml new file mode 100644 index 0000000..a162721 --- /dev/null +++ b/test/fixtures/formatting-after/fmt-2.yml @@ -0,0 +1,24 @@ +# preamble/header comment +--- +# initial comment +- foo: bar + +- baz: # over indented + - qwerty + - foobar + animals: # under indented + - crow + - pig + - giraffe + +- nothing: # null + +- octal: + - "0o123" # YAML 1.2 octal + - "0123" # YAML 1.1 octal + +- integer: + - 0 # Not an octal. See #2071 + - 10 + - 9999 + zero: 0 # Not an octal. 
See #2071 diff --git a/test/fixtures/formatting-after/fmt-3.yml b/test/fixtures/formatting-after/fmt-3.yml new file mode 100644 index 0000000..d8106f7 --- /dev/null +++ b/test/fixtures/formatting-after/fmt-3.yml @@ -0,0 +1,21 @@ +--- +dummy_map: # eol comment + # full line comment not indented + something: + # full line comment indented + # next full line comment indented + - or + # 1 full line comments over indented + # 2 full line comments over indented + - other + - | + # this is part of a string not a yaml comment + # also not a comment + +# comment before top-level +second_key: + - {} # should drop the extra space in flow map + # comment before non top-level + - {} + # comment before non top-level + - [] diff --git a/test/fixtures/formatting-before/fmt-1.yml b/test/fixtures/formatting-before/fmt-1.yml new file mode 100644 index 0000000..0678111 --- /dev/null +++ b/test/fixtures/formatting-before/fmt-1.yml @@ -0,0 +1,53 @@ +--- + + + +# ^ too many newlines before +foo: bar # This is a comment has extra spaces preceding it + +fruits: # unindented sequence: +- apple +- orange +vegetables: # indented sequence: + - onion + - carrot + +quoting: + - 'that should have double quotes' + - 'that should remain in single quotes' + - 'a string with " inside' + # next line has some undesired trailing spaces: + - "a string with ' inside" + - can't be sure! + # next line should be converted to use double quotes: + - ['foo', 'bar'] + +inline-dictionary: + - {foo: bar} # should add some spacing between curly braces and content + - { foo2: bar2 } # should reduce spacing between curly braces and content + +# YAML 1.1 Boolean-hell: https://yaml.org/type/bool.html +booleans-true: + preferred: true # YAML 1.2 compatible! + answer-1.1: YES + canonical-1.1: y + canonical-upper-1.1: Y + logical-1.1: True + option-1.1: on +booleans-false: + preferred: false # YAML 1.2 compatible! + answer-1.1: NO + canonical-1.1: n + canonical-upper-1.1: N + logical-1.1: False + option-1.1: off + + +# ^ double newline should be removed +overly-indented-vault-value: !vault | + $ANSIBLE_VAULT;1.1;AES256 + 123466303630313 + +# this file also has 3 newlines at end-of-file instead of one + + diff --git a/test/fixtures/formatting-before/fmt-2.yml b/test/fixtures/formatting-before/fmt-2.yml new file mode 100644 index 0000000..2941663 --- /dev/null +++ b/test/fixtures/formatting-before/fmt-2.yml @@ -0,0 +1,24 @@ +# preamble/header comment +--- +# initial comment + - foo: bar + + - baz: # over indented + - qwerty + - foobar + animals: # under indented + - crow + - pig + - giraffe + + - nothing: null # null + + - octal: + - 0o123 # YAML 1.2 octal + - 0123 # YAML 1.1 octal + + - integer: + - 0 # Not an octal. See #2071 + - 10 + - 9999 + zero: 0 # Not an octal. 
See #2071 diff --git a/test/fixtures/formatting-before/fmt-3.yml b/test/fixtures/formatting-before/fmt-3.yml new file mode 100644 index 0000000..c862cc4 --- /dev/null +++ b/test/fixtures/formatting-before/fmt-3.yml @@ -0,0 +1,21 @@ +--- +dummy_map: # eol comment +# full line comment not indented + something: + # full line comment indented + # next full line comment indented + - or + # 1 full line comments over indented + # 2 full line comments over indented + - other + - | + # this is part of a string not a yaml comment + # also not a comment + +# comment before top-level +second_key: + - { } # should drop the extra space in flow map +# comment before non top-level + - {} +# comment before non top-level + - [] diff --git a/test/fixtures/formatting-prettier/fmt-1.yml b/test/fixtures/formatting-prettier/fmt-1.yml new file mode 100644 index 0000000..d74c826 --- /dev/null +++ b/test/fixtures/formatting-prettier/fmt-1.yml @@ -0,0 +1,48 @@ +--- +# ^ too many newlines before +foo: bar # This is a comment has extra spaces preceding it + +fruits: # unindented sequence: + - apple + - orange +vegetables: # indented sequence: + - onion + - carrot + +quoting: + - "that should have double quotes" + - "that should remain in single quotes" + - 'a string with " inside' + # next line has some undesired trailing spaces: + - "a string with ' inside" + - can't be sure! + # next line should be converted to use double quotes: + - ["foo", "bar"] + +inline-dictionary: + - { foo: bar } # should add some spacing between curly braces and content + - { foo2: bar2 } # should reduce spacing between curly braces and content + +# YAML 1.1 Boolean-hell: https://yaml.org/type/bool.html +booleans-true: + preferred: true # YAML 1.2 compatible! + answer-1.1: YES + canonical-1.1: y + canonical-upper-1.1: Y + logical-1.1: True + option-1.1: on +booleans-false: + preferred: false # YAML 1.2 compatible! + answer-1.1: NO + canonical-1.1: n + canonical-upper-1.1: N + logical-1.1: False + option-1.1: off + +# ^ double newline should be removed +overly-indented-vault-value: !vault | + $ANSIBLE_VAULT;1.1;AES256 + 123466303630313 + +# this file also has 3 newlines at end-of-file instead of one + diff --git a/test/fixtures/formatting-prettier/fmt-2.yml b/test/fixtures/formatting-prettier/fmt-2.yml new file mode 100644 index 0000000..90ac484 --- /dev/null +++ b/test/fixtures/formatting-prettier/fmt-2.yml @@ -0,0 +1,24 @@ +# preamble/header comment +--- +# initial comment +- foo: bar + +- baz: # over indented + - qwerty + - foobar + animals: # under indented + - crow + - pig + - giraffe + +- nothing: null # null + +- octal: + - "0o123" # YAML 1.2 octal + - "0123" # YAML 1.1 octal + +- integer: + - 0 # Not an octal. See #2071 + - 10 + - 9999 + zero: 0 # Not an octal. 
See #2071 diff --git a/test/fixtures/formatting-prettier/fmt-3.yml b/test/fixtures/formatting-prettier/fmt-3.yml new file mode 100644 index 0000000..658d550 --- /dev/null +++ b/test/fixtures/formatting-prettier/fmt-3.yml @@ -0,0 +1,21 @@ +--- +dummy_map: # eol comment + # full line comment not indented + something: + # full line comment indented + # next full line comment indented + - or + # 1 full line comments over indented + # 2 full line comments over indented + - other + - | + # this is part of a string not a yaml comment + # also not a comment + +# comment before top-level +second_key: + - {} # should drop the extra space in flow map + # comment before non top-level + - {} + # comment before non top-level + - [] diff --git a/test/fixtures/list-rules-tests/.yamllint b/test/fixtures/list-rules-tests/.yamllint new file mode 100644 index 0000000..d9e1a25 --- /dev/null +++ b/test/fixtures/list-rules-tests/.yamllint @@ -0,0 +1,2 @@ +--- +{} diff --git a/test/fixtures/parseable.yml b/test/fixtures/parseable.yml new file mode 100644 index 0000000..a1f661a --- /dev/null +++ b/test/fixtures/parseable.yml @@ -0,0 +1,3 @@ +--- +parseable: true +# vim: et:sw=2:syntax=yaml:ts=2: diff --git a/test/fixtures/quiet.yml b/test/fixtures/quiet.yml new file mode 100644 index 0000000..9bacbc6 --- /dev/null +++ b/test/fixtures/quiet.yml @@ -0,0 +1,3 @@ +--- +quiet: true +# vim: et:sw=2:syntax=yaml:ts=2: diff --git a/test/fixtures/rulesdir-defaults.yml b/test/fixtures/rulesdir-defaults.yml new file mode 100644 index 0000000..c8884bb --- /dev/null +++ b/test/fixtures/rulesdir-defaults.yml @@ -0,0 +1,5 @@ +--- +rulesdir: + - ./rules +use_default_rules: true +# vim: et:sw=2:syntax=yaml:ts=2: diff --git a/test/fixtures/rulesdir.yml b/test/fixtures/rulesdir.yml new file mode 100644 index 0000000..77c4c3d --- /dev/null +++ b/test/fixtures/rulesdir.yml @@ -0,0 +1,4 @@ +--- +rulesdir: + - ./rules +# vim: et:sw=2:syntax=yaml:ts=2: diff --git a/test/fixtures/show-abspath.yml b/test/fixtures/show-abspath.yml new file mode 100644 index 0000000..367caff --- /dev/null +++ b/test/fixtures/show-abspath.yml @@ -0,0 +1,3 @@ +--- +display_relative_path: false +# vim: et:sw=2:syntax=2:ts=2: diff --git a/test/fixtures/show-relpath.yml b/test/fixtures/show-relpath.yml new file mode 100644 index 0000000..684f209 --- /dev/null +++ b/test/fixtures/show-relpath.yml @@ -0,0 +1,3 @@ +--- +display_relative_path: true +# vim: et:sw=2:syntax=2:ts=2: diff --git a/test/fixtures/skip-tags.yml b/test/fixtures/skip-tags.yml new file mode 100644 index 0000000..b9c215b --- /dev/null +++ b/test/fixtures/skip-tags.yml @@ -0,0 +1,4 @@ +--- +skip_list: + - bad_tag +# vim: et:sw=2:syntax=yaml:ts=2: diff --git a/test/fixtures/strict.yml b/test/fixtures/strict.yml new file mode 100644 index 0000000..00e7aad --- /dev/null +++ b/test/fixtures/strict.yml @@ -0,0 +1,3 @@ +--- +strict: true +# vim: et:sw=2:syntax=yaml:ts=2: diff --git a/test/fixtures/tags.yml b/test/fixtures/tags.yml new file mode 100644 index 0000000..70dd1b1 --- /dev/null +++ b/test/fixtures/tags.yml @@ -0,0 +1,4 @@ +--- +tags: + - skip_ansible_lint +# vim: et:sw=2:syntax=yaml:ts=2: diff --git a/test/fixtures/unknown-type.yml b/test/fixtures/unknown-type.yml new file mode 100644 index 0000000..54c6d2b --- /dev/null +++ b/test/fixtures/unknown-type.yml @@ -0,0 +1,2 @@ +--- +some: map diff --git a/test/fixtures/verbosity-tests/.yamllint b/test/fixtures/verbosity-tests/.yamllint new file mode 100644 index 0000000..d9e1a25 --- /dev/null +++ b/test/fixtures/verbosity-tests/.yamllint @@ -0,0 
+1,2 @@ +--- +{} diff --git a/test/fixtures/verbosity.yml b/test/fixtures/verbosity.yml new file mode 100644 index 0000000..673d6f1 --- /dev/null +++ b/test/fixtures/verbosity.yml @@ -0,0 +1,3 @@ +--- +verbosity: 1 +# vim: et:sw=2:syntax=yaml:ts=2: diff --git a/test/foo.txt b/test/foo.txt new file mode 100644 index 0000000..94f8f1a --- /dev/null +++ b/test/foo.txt @@ -0,0 +1 @@ +Foo file diff --git a/test/local-content/README.md b/test/local-content/README.md new file mode 100644 index 0000000..2b6322a --- /dev/null +++ b/test/local-content/README.md @@ -0,0 +1,6 @@ +The reason that every roles test gets its own directory is that while they +use the same three roles, the way the tests work makes sure that when the +second one runs, the roles and their local plugins from the first test are +still known to Ansible. For that reason, their names reflect the directory +they are in to make sure that tests don't use modules/plugins found by +other tests. diff --git a/test/local-content/collections/ansible_collections/testns/test_collection/galaxy.yml b/test/local-content/collections/ansible_collections/testns/test_collection/galaxy.yml new file mode 100644 index 0000000..4cbaa67 --- /dev/null +++ b/test/local-content/collections/ansible_collections/testns/test_collection/galaxy.yml @@ -0,0 +1,4 @@ +--- +namespace: testns +name: test_collection +version: 0.1.0 diff --git a/test/local-content/collections/ansible_collections/testns/test_collection/plugins/filter/test_filter.py b/test/local-content/collections/ansible_collections/testns/test_collection/plugins/filter/test_filter.py new file mode 100644 index 0000000..58bc269 --- /dev/null +++ b/test/local-content/collections/ansible_collections/testns/test_collection/plugins/filter/test_filter.py @@ -0,0 +1,17 @@ +"""A filter plugin.""" +# pylint: disable=invalid-name + + +def a_test_filter(a, b): + """Return a string containing both a and b.""" + return f"{a}:{b}" + + +# pylint: disable=too-few-public-methods +class FilterModule: + """Filter plugin.""" + + @staticmethod + def filters(): + """Return filters.""" + return {"test_filter": a_test_filter} diff --git a/test/local-content/collections/ansible_collections/testns/test_collection/plugins/modules/test_module_2.py b/test/local-content/collections/ansible_collections/testns/test_collection/plugins/modules/test_module_2.py new file mode 100644 index 0000000..a63d06d --- /dev/null +++ b/test/local-content/collections/ansible_collections/testns/test_collection/plugins/modules/test_module_2.py @@ -0,0 +1,14 @@ +#!/usr/bin/python +"""A module.""" + +from ansible.module_utils.basic import AnsibleModule + + +def main() -> None: + """Execute module.""" + module = AnsibleModule({}) + module.exit_json(msg="Hello 2!") + + +if __name__ == "__main__": + main() diff --git a/test/local-content/test-collection.yml b/test/local-content/test-collection.yml new file mode 100644 index 0000000..47b097d --- /dev/null +++ b/test/local-content/test-collection.yml @@ -0,0 +1,10 @@ +--- +- name: Use module and filter plugin from local collection + hosts: localhost + tasks: + - name: Use module from local collection + testns.test_collection.test_module_2: + - name: Use filter from local collection + ansible.builtin.assert: + that: + - 1 | testns.test_collection.test_filter(2) == '1:2' diff --git a/test/local-content/test-roles-failed-complete/roles/role1/library/test_module_1_failed_complete.py b/test/local-content/test-roles-failed-complete/roles/role1/library/test_module_1_failed_complete.py new file mode 100644 index 
0000000..d9012a7 --- /dev/null +++ b/test/local-content/test-roles-failed-complete/roles/role1/library/test_module_1_failed_complete.py @@ -0,0 +1,14 @@ +#!/usr/bin/python +"""A module.""" + +from ansible.module_utils.basic import AnsibleModule + + +def main() -> None: + """Execute module.""" + module = AnsibleModule({}) + module.exit_json(msg="Hello 1!") + + +if __name__ == "__main__": + main() diff --git a/test/local-content/test-roles-failed-complete/roles/role1/tasks/main.yml b/test/local-content/test-roles-failed-complete/roles/role1/tasks/main.yml new file mode 100644 index 0000000..680dcab --- /dev/null +++ b/test/local-content/test-roles-failed-complete/roles/role1/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- name: Use local module 1 + test_module_1_failed_complete: diff --git a/test/local-content/test-roles-failed-complete/roles/role2/tasks/main.yml b/test/local-content/test-roles-failed-complete/roles/role2/tasks/main.yml new file mode 100644 index 0000000..8646f6b --- /dev/null +++ b/test/local-content/test-roles-failed-complete/roles/role2/tasks/main.yml @@ -0,0 +1,11 @@ +--- +- name: Use local module from other role that has been included before this one + # If it has not been included before, loading this role fails! + test_module_1_failed_complete: +- name: Use local module from other role that has been included before this one + # If it has not been included before, loading this role fails! + test_module_3_failed_complete: +- name: Use local test plugin + assert: + that: + - "'2' is b_test_failed_complete '12345'" diff --git a/test/local-content/test-roles-failed-complete/roles/role2/test_plugins/b_failed_complete.py b/test/local-content/test-roles-failed-complete/roles/role2/test_plugins/b_failed_complete.py new file mode 100644 index 0000000..92bd6e7 --- /dev/null +++ b/test/local-content/test-roles-failed-complete/roles/role2/test_plugins/b_failed_complete.py @@ -0,0 +1,19 @@ +"""A test plugin.""" +# pylint: disable=invalid-name + + +def compatibility_in_test(a, b): + """Return True when a is contained in b.""" + return a in b + + +# pylint: disable=too-few-public-methods +class TestModule: + """Test plugin.""" + + @staticmethod + def tests(): + """Return tests.""" + return { + "b_test_failed_complete": compatibility_in_test, + } diff --git a/test/local-content/test-roles-failed-complete/roles/role3/library/test_module_3_failed_complete.py b/test/local-content/test-roles-failed-complete/roles/role3/library/test_module_3_failed_complete.py new file mode 100644 index 0000000..4d9de0e --- /dev/null +++ b/test/local-content/test-roles-failed-complete/roles/role3/library/test_module_3_failed_complete.py @@ -0,0 +1,14 @@ +#!/usr/bin/python +"""A module.""" + +from ansible.module_utils.basic import AnsibleModule + + +def main() -> None: + """Execute module.""" + module = AnsibleModule({}) + module.exit_json(msg="Hello 3!") + + +if __name__ == "__main__": + main() diff --git a/test/local-content/test-roles-failed-complete/roles/role3/tasks/main.yml b/test/local-content/test-roles-failed-complete/roles/role3/tasks/main.yml new file mode 100644 index 0000000..7a36734 --- /dev/null +++ b/test/local-content/test-roles-failed-complete/roles/role3/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- name: Use local module 3 + test_module_3_failed_complete: diff --git a/test/local-content/test-roles-failed/roles/role1/library/test_module_1_failed.py b/test/local-content/test-roles-failed/roles/role1/library/test_module_1_failed.py new file mode 100644 index 0000000..d9012a7 --- /dev/null +++ 
b/test/local-content/test-roles-failed/roles/role1/library/test_module_1_failed.py @@ -0,0 +1,14 @@ +#!/usr/bin/python +"""A module.""" + +from ansible.module_utils.basic import AnsibleModule + + +def main() -> None: + """Execute module.""" + module = AnsibleModule({}) + module.exit_json(msg="Hello 1!") + + +if __name__ == "__main__": + main() diff --git a/test/local-content/test-roles-failed/roles/role1/tasks/main.yml b/test/local-content/test-roles-failed/roles/role1/tasks/main.yml new file mode 100644 index 0000000..257493a --- /dev/null +++ b/test/local-content/test-roles-failed/roles/role1/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- name: Use local module 1 + test_module_1_failed: diff --git a/test/local-content/test-roles-failed/roles/role2/tasks/main.yml b/test/local-content/test-roles-failed/roles/role2/tasks/main.yml new file mode 100644 index 0000000..48daca6 --- /dev/null +++ b/test/local-content/test-roles-failed/roles/role2/tasks/main.yml @@ -0,0 +1,11 @@ +--- +- name: Use local module from other role that has been included before this one + # If it has not been included before, loading this role fails! + test_module_1_failed: +- name: Use local module from other role that has been included before this one + # If it has not been included before, loading this role fails! + test_module_3_failed: +- name: Use local test plugin + assert: + that: + - "'2' is b_test_failed '12345'" diff --git a/test/local-content/test-roles-failed/roles/role2/test_plugins/b_failed.py b/test/local-content/test-roles-failed/roles/role2/test_plugins/b_failed.py new file mode 100644 index 0000000..4bb6167 --- /dev/null +++ b/test/local-content/test-roles-failed/roles/role2/test_plugins/b_failed.py @@ -0,0 +1,18 @@ +"""A test plugin.""" + + +def compatibility_in_test(element, container): + """Return True when element is contained in container.""" + return element in container + + +# pylint: disable=too-few-public-methods +class TestModule: + """Test plugin.""" + + @staticmethod + def tests(): + """Return tests.""" + return { + "b_test_failed": compatibility_in_test, + } diff --git a/test/local-content/test-roles-failed/roles/role3/library/test_module_3_failed.py b/test/local-content/test-roles-failed/roles/role3/library/test_module_3_failed.py new file mode 100644 index 0000000..4d9de0e --- /dev/null +++ b/test/local-content/test-roles-failed/roles/role3/library/test_module_3_failed.py @@ -0,0 +1,14 @@ +#!/usr/bin/python +"""A module.""" + +from ansible.module_utils.basic import AnsibleModule + + +def main() -> None: + """Execute module.""" + module = AnsibleModule({}) + module.exit_json(msg="Hello 3!") + + +if __name__ == "__main__": + main() diff --git a/test/local-content/test-roles-failed/roles/role3/tasks/main.yml b/test/local-content/test-roles-failed/roles/role3/tasks/main.yml new file mode 100644 index 0000000..ad17eb0 --- /dev/null +++ b/test/local-content/test-roles-failed/roles/role3/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- name: Use local module 3 + test_module_3_failed: diff --git a/test/local-content/test-roles-failed/test.yml b/test/local-content/test-roles-failed/test.yml new file mode 100644 index 0000000..08ff0f6 --- /dev/null +++ b/test/local-content/test-roles-failed/test.yml @@ -0,0 +1,7 @@ +--- +- name: Use roles with local module in wrong order, so that Ansible fails + hosts: localhost + roles: + - role2 + - role3 + - role1 diff --git a/test/local-content/test-roles-success/roles/role1/library/test_module_1_success.py 
b/test/local-content/test-roles-success/roles/role1/library/test_module_1_success.py new file mode 100644 index 0000000..d9012a7 --- /dev/null +++ b/test/local-content/test-roles-success/roles/role1/library/test_module_1_success.py @@ -0,0 +1,14 @@ +#!/usr/bin/python +"""A module.""" + +from ansible.module_utils.basic import AnsibleModule + + +def main() -> None: + """Execute module.""" + module = AnsibleModule({}) + module.exit_json(msg="Hello 1!") + + +if __name__ == "__main__": + main() diff --git a/test/local-content/test-roles-success/roles/role1/tasks/main.yml b/test/local-content/test-roles-success/roles/role1/tasks/main.yml new file mode 100644 index 0000000..ba920af --- /dev/null +++ b/test/local-content/test-roles-success/roles/role1/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- name: Use local module 1 + test_module_1_success: diff --git a/test/local-content/test-roles-success/roles/role2/tasks/main.yml b/test/local-content/test-roles-success/roles/role2/tasks/main.yml new file mode 100644 index 0000000..a540cf1 --- /dev/null +++ b/test/local-content/test-roles-success/roles/role2/tasks/main.yml @@ -0,0 +1,11 @@ +--- +- name: Use local module from other role that has been included before this one + # If it has not been included before, loading this role fails! + test_module_1_success: +- name: Use local module from other role that has been included before this one + # If it has not been included before, loading this role fails! + test_module_3_success: +- name: Use local test plugin + assert: + that: + - "'2' is b_test_success '12345'" diff --git a/test/local-content/test-roles-success/roles/role2/test_plugins/b_success.py b/test/local-content/test-roles-success/roles/role2/test_plugins/b_success.py new file mode 100644 index 0000000..6cf2bae --- /dev/null +++ b/test/local-content/test-roles-success/roles/role2/test_plugins/b_success.py @@ -0,0 +1,18 @@ +"""A test plugin.""" + + +def compatibility_in_test(element, container): + """Return True when element contained in container.""" + return element in container + + +# pylint: disable=too-few-public-methods +class TestModule: + """Test plugin.""" + + @staticmethod + def tests(): + """Return tests.""" + return { + "b_test_success": compatibility_in_test, + } diff --git a/test/local-content/test-roles-success/roles/role3/library/test_module_3_success.py b/test/local-content/test-roles-success/roles/role3/library/test_module_3_success.py new file mode 100644 index 0000000..4d9de0e --- /dev/null +++ b/test/local-content/test-roles-success/roles/role3/library/test_module_3_success.py @@ -0,0 +1,14 @@ +#!/usr/bin/python +"""A module.""" + +from ansible.module_utils.basic import AnsibleModule + + +def main() -> None: + """Execute module.""" + module = AnsibleModule({}) + module.exit_json(msg="Hello 3!") + + +if __name__ == "__main__": + main() diff --git a/test/local-content/test-roles-success/roles/role3/tasks/main.yml b/test/local-content/test-roles-success/roles/role3/tasks/main.yml new file mode 100644 index 0000000..c77a7c8 --- /dev/null +++ b/test/local-content/test-roles-success/roles/role3/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- name: Use local module 3 + test_module_3_success: diff --git a/test/rules/__init__.py b/test/rules/__init__.py new file mode 100644 index 0000000..28b581d --- /dev/null +++ b/test/rules/__init__.py @@ -0,0 +1 @@ +"""Tests for specific rules.""" diff --git a/test/rules/fixtures/__init__.py b/test/rules/fixtures/__init__.py new file mode 100644 index 0000000..d049bf0 --- /dev/null +++ 
b/test/rules/fixtures/__init__.py @@ -0,0 +1,3 @@ +"""Test rules resources.""" + +__all__ = ["ematcher", "raw_task", "unset_variable_matcher"] diff --git a/test/rules/fixtures/ematcher.py b/test/rules/fixtures/ematcher.py new file mode 100644 index 0000000..cbae696 --- /dev/null +++ b/test/rules/fixtures/ematcher.py @@ -0,0 +1,15 @@ +"""Custom rule used as fixture.""" +from ansiblelint.rules import AnsibleLintRule + + +class EMatcherRule(AnsibleLintRule): + """BANNED string found.""" + + id = "TEST0001" + description = ( + "This is a test custom rule that looks for lines " + "containing BANNED string" + ) + tags = ["fake", "dummy", "test1"] + + def match(self, line: str) -> bool: + return "BANNED" in line diff --git a/test/rules/fixtures/raw_task.md b/test/rules/fixtures/raw_task.md new file mode 100644 index 0000000..2aa6d22 --- /dev/null +++ b/test/rules/fixtures/raw_task.md @@ -0,0 +1,3 @@ +# raw-task + +This is a test rule that looks in a raw task to flag raw action params. diff --git a/test/rules/fixtures/raw_task.py b/test/rules/fixtures/raw_task.py new file mode 100644 index 0000000..672789d --- /dev/null +++ b/test/rules/fixtures/raw_task.py @@ -0,0 +1,25 @@ +"""Test Rule that needs_raw_task.""" +from __future__ import annotations + +from typing import Any + +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import AnsibleLintRule + + +class RawTaskRule(AnsibleLintRule): + """Test rule that inspects the raw task.""" + + id = "raw-task" + shortdesc = "Test rule that inspects the raw task" + tags = ["fake", "dummy", "test3"] + needs_raw_task = True + + def matchtask( + self, task: dict[str, Any], file: Lintable | None = None + ) -> bool | str: + """Match a task using __raw_task__ to inspect the module params type.""" + raw_task = task["__raw_task__"] + module = task["action"]["__ansible_module_original__"] + found_raw_task_params = not isinstance(raw_task[module], dict) + return found_raw_task_params diff --git a/test/rules/fixtures/unset_variable_matcher.py b/test/rules/fixtures/unset_variable_matcher.py new file mode 100644 index 0000000..c2b1744 --- /dev/null +++ b/test/rules/fixtures/unset_variable_matcher.py @@ -0,0 +1,16 @@ +"""Custom linting rule used as test fixture.""" +from ansiblelint.rules import AnsibleLintRule + + +class UnsetVariableMatcherRule(AnsibleLintRule): + """Line contains untemplated variable.""" + + id = "TEST0002" + description = ( + "This is a test rule that looks for lines " + + "post templating that still contain {{" + ) + tags = ["fake", "dummy", "test2"] + + def match(self, line: str) -> bool: + return "{{" in line diff --git a/test/rules/test_deprecated_module.py b/test/rules/test_deprecated_module.py new file mode 100644 index 0000000..506d91f --- /dev/null +++ b/test/rules/test_deprecated_module.py @@ -0,0 +1,25 @@ +"""Tests for deprecated-module rule.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.deprecated_module import DeprecatedModuleRule +from ansiblelint.testing import RunFromText + +MODULE_DEPRECATED = """ +- name: Task example + docker: + debug: test +""" + + +def test_module_deprecated() -> None: + """Test for deprecated-module.""" + collection = RulesCollection() + collection.register(DeprecatedModuleRule()) + runner = RunFromText(collection) + results = runner.run_role_tasks_main(MODULE_DEPRECATED) + assert len(results) == 1 + # based on version and blend of ansible being used, we may + # get a missing module, so we future proof the test + assert ( + "couldn't resolve module" not in 
results[0].message + or "Deprecated module" not in results[0].message + ) diff --git a/test/rules/test_inline_env_var.py b/test/rules/test_inline_env_var.py new file mode 100644 index 0000000..98f337e --- /dev/null +++ b/test/rules/test_inline_env_var.py @@ -0,0 +1,90 @@ +"""Tests for inline-env-var rule.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.inline_env_var import EnvVarsInCommandRule +from ansiblelint.testing import RunFromText + +SUCCESS_PLAY_TASKS = """ +- hosts: localhost + + tasks: + - name: Actual use of environment + shell: echo $HELLO + environment: + HELLO: hello + + - name: Use some key-value pairs + command: chdir=/tmp creates=/tmp/bobbins warn=no touch bobbins + + - name: Commands can have flags + command: abc --xyz=def blah + + - name: Commands can have equals in them + command: echo "===========" + + - name: Commands with cmd + command: + cmd: + echo "-------" + + - name: Command with stdin (ansible > 2.4) + command: /bin/cat + args: + stdin: "Hello, world!" + + - name: Use argv to send the command as a list + command: + argv: + - /bin/echo + - Hello + - World + + - name: Another use of argv + command: + args: + argv: + - echo + - testing + + - name: Environment variable with shell + shell: HELLO=hello echo $HELLO + + - name: Command with stdin_add_newline (ansible > 2.8) + command: /bin/cat + args: + stdin: "Hello, world!" + stdin_add_newline: false + + - name: Command with strip_empty_ends (ansible > 2.8) + command: echo + args: + strip_empty_ends: false +""" + +FAIL_PLAY_TASKS = """ +- hosts: localhost + + tasks: + - name: Environment variable with command + command: HELLO=hello echo $HELLO + + - name: Typo some stuff + command: cerates=/tmp/blah warn=no touch /tmp/blah +""" + + +def test_success() -> None: + """Positive test for inline-env-var.""" + collection = RulesCollection() + collection.register(EnvVarsInCommandRule()) + runner = RunFromText(collection) + results = runner.run_playbook(SUCCESS_PLAY_TASKS) + assert len(results) == 0 + + +def test_fail() -> None: + """Negative test for inline-env-var.""" + collection = RulesCollection() + collection.register(EnvVarsInCommandRule()) + runner = RunFromText(collection) + results = runner.run_playbook(FAIL_PLAY_TASKS) + assert len(results) == 2 diff --git a/test/rules/test_line_too_long.py b/test/rules/test_line_too_long.py new file mode 100644 index 0000000..3c7517b --- /dev/null +++ b/test/rules/test_line_too_long.py @@ -0,0 +1,20 @@ +"""Tests for line-too-long rule.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.yaml_rule import YamllintRule +from ansiblelint.testing import RunFromText + +LONG_LINE = """\ +--- +- name: Task example + debug: + msg: 'This is a very long text that is used in order to verify the rule that checks for very long lines. We do hope it was long enough to go over the line limit.' 
+""" # noqa: E501 + + +def test_long_line() -> None: + """Negative test for long-line.""" + collection = RulesCollection() + collection.register(YamllintRule()) + runner = RunFromText(collection) + results = runner.run_role_tasks_main(LONG_LINE) + assert len(results) == 1 diff --git a/test/rules/test_literal_compare.py b/test/rules/test_literal_compare.py new file mode 100644 index 0000000..6953cb1 --- /dev/null +++ b/test/rules/test_literal_compare.py @@ -0,0 +1,93 @@ +"""Tests for literal-compare rule.""" +import pytest + +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.literal_compare import ComparisonToLiteralBoolRule +from ansiblelint.testing import RunFromText + +PASS_WHEN = """ +- name: Example task + debug: + msg: test + when: my_var + +- name: Another example task + debug: + msg: test + when: + - 1 + 1 == 2 + - true +""" + +PASS_WHEN_NOT_FALSE = """ +- name: Example task + debug: + msg: test + when: not my_var +""" + +PASS_WHEN_NOT_NULL = """ +- name: Example task + debug: + msg: test + when: my_var not None +""" + +FAIL_LITERAL_TRUE = """ +- name: Example task + debug: + msg: test + when: my_var == True +""" + +FAIL_LITERAL_FALSE = """ +- name: Example task + debug: + msg: test + when: my_var == false + +- name: Another example task + debug: + msg: test + when: + - my_var == false +""" + + +@pytest.mark.parametrize( + ("input_str", "found_errors"), + ( + pytest.param( + PASS_WHEN, + 0, + id="pass_when", + ), + pytest.param( + PASS_WHEN_NOT_FALSE, + 0, + id="when_not_false", + ), + pytest.param( + PASS_WHEN_NOT_NULL, + 0, + id="when_not_null", + ), + pytest.param( + FAIL_LITERAL_TRUE, + 1, + id="literal_true", + ), + pytest.param( + FAIL_LITERAL_FALSE, + 2, + id="literal_false", + ), + ), +) +def test_literal_compare(input_str: str, found_errors: int) -> None: + """Test literal-compare.""" + collection = RulesCollection() + collection.register(ComparisonToLiteralBoolRule()) + runner = RunFromText(collection) + results = runner.run_role_tasks_main(input_str) + assert len(results) == found_errors diff --git a/test/rules/test_meta_change_from_default.py b/test/rules/test_meta_change_from_default.py new file mode 100644 index 0000000..31125d3 --- /dev/null +++ b/test/rules/test_meta_change_from_default.py @@ -0,0 +1,42 @@ +"""Tests for meta-incorrect rule.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.meta_incorrect import MetaChangeFromDefaultRule +from ansiblelint.testing import RunFromText + +DEFAULT_GALAXY_INFO = """ +galaxy_info: + author: your name + description: your description + company: your company (optional) + license: license (GPLv2, CC-BY, etc) +""" + +NO_GALAXY_INFO = """ +galaxy_information: + author: your name + description: your description + company: your company (optional) + license: license (GPLv2, CC-BY, etc) +""" + + +def test_default_galaxy_info() -> None: + """Test for meta-incorrect.""" + collection = RulesCollection() + collection.register(MetaChangeFromDefaultRule()) + runner = RunFromText(collection) + results = runner.run_role_meta_main(DEFAULT_GALAXY_INFO) + # Disabled check because default value is not passing schema validation + # assert "Should change default metadata: author" in str(results) + assert "Should change default metadata: description" in str(results) + assert "Should change default metadata: company" in str(results) + assert "Should change default metadata: license" in str(results) + + +def test_no_galaxy_info() -> None: + """Test for no galaxy info passed to meta-incorrect.""" + collection 
= RulesCollection() + collection.register(MetaChangeFromDefaultRule()) + runner = RunFromText(collection) + results = runner.run_role_meta_main(NO_GALAXY_INFO) + assert results == [] diff --git a/test/rules/test_meta_no_info.py b/test/rules/test_meta_no_info.py new file mode 100644 index 0000000..bbadec6 --- /dev/null +++ b/test/rules/test_meta_no_info.py @@ -0,0 +1,98 @@ +"""Tests for meta-no-info rule.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.meta_no_info import MetaMainHasInfoRule +from ansiblelint.testing import RunFromText + +NO_GALAXY_INFO = """ +author: the author +description: this meta/main.yml has no galaxy_info +""" + +MISSING_INFO = """ +galaxy_info: + # author: the author + description: Testing if meta contains values + company: Not applicable + + license: MIT + + # min_ansible_version: 2.5 + + platforms: + - name: Fedora + versions: + - 25 + - missing_name: No name + versions: + - 25 +""" + +BAD_TYPES = """ +galaxy_info: + author: 007 + description: ['Testing meta'] + company: Not applicable + + license: MIT + + min_ansible_version: 2.5 + + platforms: Fedora +""" + +PLATFORMS_LIST_OF_STR = """ +galaxy_info: + author: '007' + description: 'Testing meta' + company: Not applicable + + license: MIT + + min_ansible_version: 2.5 + + platforms: ['Fedora', 'EL'] +""" + + +def test_no_galaxy_info() -> None: + """Test meta-no-info with missing galaxy_info.""" + collection = RulesCollection() + collection.register(MetaMainHasInfoRule()) + runner = RunFromText(collection) + results = runner.run_role_meta_main(NO_GALAXY_INFO) + assert len(results) == 1 + assert "No 'galaxy_info' found" in str(results) + + +def test_missing_info() -> None: + """Test meta-no-info.""" + collection = RulesCollection() + collection.register(MetaMainHasInfoRule()) + runner = RunFromText(collection) + results = runner.run_role_meta_main(MISSING_INFO) + assert len(results) == 3 + assert "Role info should contain author" in str(results) + assert "Role info should contain min_ansible_version" in str(results) + assert "Platform should contain name" in str(results) + + +def test_bad_types() -> None: + """Test meta-no-info with bad types.""" + collection = RulesCollection() + collection.register(MetaMainHasInfoRule()) + runner = RunFromText(collection) + results = runner.run_role_meta_main(BAD_TYPES) + assert len(results) == 3 + assert "author should be a string" in str(results) + assert "description should be a string" in str(results) + assert "Platforms should be a list of dictionaries" in str(results) + + +def test_platform_list_of_str() -> None: + """Test meta-no-info with platforms.""" + collection = RulesCollection() + collection.register(MetaMainHasInfoRule()) + runner = RunFromText(collection) + results = runner.run_role_meta_main(PLATFORMS_LIST_OF_STR) + assert len(results) == 1 + assert "Platforms should be a list of dictionaries" in str(results) diff --git a/test/rules/test_meta_video_links.py b/test/rules/test_meta_video_links.py new file mode 100644 index 0000000..3772ea2 --- /dev/null +++ b/test/rules/test_meta_video_links.py @@ -0,0 +1,46 @@ +"""Tests for meta-video-links rule.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.meta_video_links import MetaVideoLinksRule +from ansiblelint.testing import RunFromText + +META_VIDEO_LINKS = """ +galaxy_info: + video_links: + - url: https://www.youtube.com/watch?v=aWmRepTSFKs&feature=youtu.be + title: Proper format + - url: https://drive.google.com/file/d/1spYR51l8SqQqvAhSdZE7/view + title: Check for 
VIDEO_REGEXP validity and break + - https://www.youtube.com/watch?v=aWmRepTSFKs&feature=youtu.be + - my_bad_key: https://www.youtube.com/watch?v=aWmRepTSFKs&feature=youtu.be + title: This has a bad key + - url: www.acme.com/vid + title: Bad format of url +""" + +META_NO_GALAXY_INFO = """ +galaxy_information: + video_links: + - url: https://www.youtube.com/watch?v=aWmRepTSFKs&feature=youtu.be +""" + + +def test_video_links() -> None: + """Test meta_video_links.""" + collection = RulesCollection() + collection.register(MetaVideoLinksRule()) + runner = RunFromText(collection) + + results = runner.run_role_meta_main(META_VIDEO_LINKS) + assert "Expected item in 'video_links' to be a dictionary" in str(results) + assert "'video_links' to contain only keys 'url' and 'title'" in str(results) + assert "URL format 'www.acme.com/vid' is not recognized" in str(results) + + +def test_meta_video_links_no_galaxy_info() -> None: + """Test meta_video_links.""" + collection = RulesCollection() + collection.register(MetaVideoLinksRule()) + runner = RunFromText(collection) + + results = runner.run_role_meta_main(META_NO_GALAXY_INFO) + assert len(results) == 0 diff --git a/test/rules/test_no_changed_when.py b/test/rules/test_no_changed_when.py new file mode 100644 index 0000000..c89d8f4 --- /dev/null +++ b/test/rules/test_no_changed_when.py @@ -0,0 +1,23 @@ +"""Tests for no-change-when rule.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.no_changed_when import CommandHasChangesCheckRule +from ansiblelint.runner import Runner + + +def test_command_changes_positive() -> None: + """Positive test for no-changed-when.""" + collection = RulesCollection() + collection.register(CommandHasChangesCheckRule()) + success = "examples/playbooks/command-check-success.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + +def test_command_changes_negative() -> None: + """Negative test for no-changed-when.""" + collection = RulesCollection() + collection.register(CommandHasChangesCheckRule()) + failure = "examples/playbooks/command-check-failure.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 2 diff --git a/test/rules/test_no_relative_paths.py b/test/rules/test_no_relative_paths.py new file mode 100644 index 0000000..6999cbb --- /dev/null +++ b/test/rules/test_no_relative_paths.py @@ -0,0 +1,53 @@ +"""Tests for no-relative-paths rule.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.no_relative_paths import RoleRelativePath +from ansiblelint.testing import RunFromText + +FAIL_TASKS = """ +- name: Template example + template: + src: ../templates/foo.j2 + dest: /etc/file.conf +- name: Copy example + copy: + src: ../files/foo.conf + dest: /etc/foo.conf +# Removed from test suite as module is no longer part of core +# - name: Some win_template example +# win_template: +# src: ../win_templates/file.conf.j2 +# dest: file.conf +# - name: Some win_copy example +# win_copy: +# src: ../files/foo.conf +# dest: renamed-foo.conf +""" + +SUCCESS_TASKS = """ +- name: Content example with no src + copy: + content: '# This file was moved to /etc/other.conf' + dest: /etc/mine.conf +- name: Copy example + copy: + src: /home/example/files/foo.conf + dest: /etc/foo.conf +""" + + +def test_no_relative_paths_fail() -> None: + """Negative test for no-relative-paths.""" + collection = RulesCollection() + collection.register(RoleRelativePath()) + runner = RunFromText(collection) + results = 
runner.run_role_tasks_main(FAIL_TASKS) + assert len(results) == 2 + + +def test_no_relative_paths_success() -> None: + """Positive test for no-relative-paths.""" + collection = RulesCollection() + collection.register(RoleRelativePath()) + runner = RunFromText(collection) + results = runner.run_role_tasks_main(SUCCESS_TASKS) + assert len(results) == 0 diff --git a/test/rules/test_package_latest.py b/test/rules/test_package_latest.py new file mode 100644 index 0000000..5631f02 --- /dev/null +++ b/test/rules/test_package_latest.py @@ -0,0 +1,23 @@ +"""Tests for package-latest rule.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.package_latest import PackageIsNotLatestRule +from ansiblelint.runner import Runner + + +def test_package_not_latest_positive() -> None: + """Positive test for package-latest.""" + collection = RulesCollection() + collection.register(PackageIsNotLatestRule()) + success = "examples/playbooks/package-check-success.yml" + good_runner = Runner(success, rules=collection) + assert [] == good_runner.run() + + +def test_package_not_latest_negative() -> None: + """Negative test for package-latest.""" + collection = RulesCollection() + collection.register(PackageIsNotLatestRule()) + failure = "examples/playbooks/package-check-failure.yml" + bad_runner = Runner(failure, rules=collection) + errs = bad_runner.run() + assert len(errs) == 4 diff --git a/test/rules/test_risky_shell_pipe.py b/test/rules/test_risky_shell_pipe.py new file mode 100644 index 0000000..b2ecb67 --- /dev/null +++ b/test/rules/test_risky_shell_pipe.py @@ -0,0 +1,94 @@ +"""Tests for risky-shell-pile rule.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.risky_shell_pipe import ShellWithoutPipefail +from ansiblelint.testing import RunFromText + +FAIL_TASKS = """ +--- +- hosts: localhost + become: no + tasks: + - name: Pipeline without pipefail + shell: false | cat + + - name: Pipeline with or and pipe, no pipefail + shell: false || true | cat + + - shell: | + df | grep '/dev' +""" + +SUCCESS_TASKS = """ +--- +- hosts: localhost + become: no + tasks: + - name: Pipeline with pipefail + shell: set -o pipefail && false | cat + + - name: Pipeline with pipefail, multi-line + shell: | + set -o pipefail + false | cat + + - name: Pipeline with pipefail, complex set + shell: | + set -e -x -o pipefail + false | cat + + - name: Pipeline with pipefail, complex set + shell: | + set -e -x -o pipefail + false | cat + + - name: Pipeline with pipefail, complex set + shell: | + set -eo pipefail + false | cat + + - name: Pipeline with pipefail not at first line + shell: | + echo foo + set -eo pipefail + false | cat + + - name: Pipeline without pipefail, ignoring errors + shell: false | cat + ignore_errors: true + + - name: Non-pipeline without pipefail + shell: "true" + + - name: Command without pipefail + command: "true" + + - name: Shell with or + shell: + false || true + + - shell: | + set -o pipefail + df | grep '/dev' + + - name: Should not fail due to ignore_errors being true + shell: false | cat + ignore_errors: true +""" + + +def test_fail() -> None: + """Negative test for risky-shell-pipe.""" + collection = RulesCollection() + collection.register(ShellWithoutPipefail()) + runner = RunFromText(collection) + results = runner.run_playbook(FAIL_TASKS) + assert len(results) == 3 + + +def test_success() -> None: + """Positive test for risky-shell-pipe.""" + collection = RulesCollection() + collection.register(ShellWithoutPipefail()) + runner = RunFromText(collection) + results = 
runner.run_playbook(SUCCESS_TASKS) + assert len(results) == 0 diff --git a/test/rules/test_role_names.py b/test/rules/test_role_names.py new file mode 100644 index 0000000..4d55a61 --- /dev/null +++ b/test/rules/test_role_names.py @@ -0,0 +1,86 @@ +"""Test the RoleNames rule.""" +from __future__ import annotations + +from pathlib import Path +from typing import Any + +import pytest +from _pytest.fixtures import SubRequest + +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.role_name import RoleNames +from ansiblelint.runner import Runner + +ROLE_NAME_VALID = "test_role" + +TASK_MINIMAL = """ +- name: Some task + ping: +""" + +ROLE_MINIMAL = {"tasks": {"main.yml": TASK_MINIMAL}} +ROLE_META_EMPTY = {"meta": {"main.yml": ""}} + +ROLE_WITH_EMPTY_META = {**ROLE_MINIMAL, **ROLE_META_EMPTY} + +PLAY_INCLUDE_ROLE = f""" +- hosts: all + roles: + - {ROLE_NAME_VALID} +""" + + +@pytest.fixture(name="test_rules_collection") +def fixture_test_rules_collection() -> RulesCollection: + """Instantiate a roles collection for tests.""" + collection = RulesCollection() + collection.register(RoleNames()) + return collection + + +def dict_to_files(parent_dir: Path, file_dict: dict[str, Any]) -> None: + """Write a nested dict to a file and directory structure below parent_dir.""" + for file, content in file_dict.items(): + if isinstance(content, dict): + directory = parent_dir / file + directory.mkdir() + dict_to_files(directory, content) + else: + (parent_dir / file).write_text(content) + + +@pytest.fixture(name="playbook_path") +def fixture_playbook_path(request: SubRequest, tmp_path: Path) -> str: + """Create a playbook with a role in a temporary directory.""" + playbook_text = request.param[0] + role_name = request.param[1] + role_layout = request.param[2] + role_path = tmp_path / role_name + role_path.mkdir() + dict_to_files(role_path, role_layout) + play_path = tmp_path / "playbook.yml" + play_path.write_text(playbook_text) + return str(play_path) + + +@pytest.mark.parametrize( + ("playbook_path", "messages"), + ( + pytest.param( + (PLAY_INCLUDE_ROLE, ROLE_NAME_VALID, ROLE_WITH_EMPTY_META), + [], + id="ROLE_EMPTY_META", + ), + ), + indirect=("playbook_path",), +) +def test_role_name( + test_rules_collection: RulesCollection, playbook_path: str, messages: list[str] +) -> None: + """Lint a playbook and compare the expected messages with the actual messages.""" + runner = Runner(playbook_path, rules=test_rules_collection) + results = runner.run() + assert len(results) == len(messages) + results_text = str(results) + for message in messages: + assert message in results_text diff --git a/test/rules/test_use_handler_rather_than_when_changed.py b/test/rules/test_use_handler_rather_than_when_changed.py new file mode 100644 index 0000000..2b70bf6 --- /dev/null +++ b/test/rules/test_use_handler_rather_than_when_changed.py @@ -0,0 +1,86 @@ +"""Tests for no-handler rule.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.no_handler import UseHandlerRatherThanWhenChangedRule +from ansiblelint.testing import RunFromText + +SUCCESS_TASKS = """ +- name: Print helpful error message + debug: + var: result + when: result.failed + +- name: Do something when hello is output + debug: + msg: why isn't this a handler + when: result.stdout == "hello" + +- name: Never actually debug + debug: + var: result + when: False + +- name: "Don't execute this step" + debug: + msg: "debug message" + when: + - false + +- name: Check when with a list + debug: + var: result + when: + - conditionA + - 
+    - conditionB
+"""
+
+
+FAIL_TASKS = """
+- name: Execute command
+  command: echo hello
+  register: result
+
+- name: This should be a handler
+  debug:
+    msg: why isn't this a handler
+  when: result.changed
+
+- name: This should be a handler 2
+  debug:
+    msg: why isn't this a handler
+  when: result|changed
+
+- name: This should be a handler 3
+  debug:
+    msg: why isn't this a handler
+  when: result.changed == true
+
+- name: This should be a handler 4
+  debug:
+    msg: why isn't this a handler
+  when: result['changed'] == true
+
+- name: This should be a handler 5
+  debug:
+    msg: why isn't this a handler
+  when:
+    - result['changed'] == true
+    - another_condition
+"""
+
+
+def test_no_handler_success() -> None:
+    """Positive test for no-handler."""
+    collection = RulesCollection()
+    collection.register(UseHandlerRatherThanWhenChangedRule())
+    runner = RunFromText(collection)
+    results = runner.run_role_tasks_main(SUCCESS_TASKS)
+    assert len(results) == 0
+
+
+def test_no_handler_fail() -> None:
+    """Negative test for no-handler."""
+    collection = RulesCollection()
+    collection.register(UseHandlerRatherThanWhenChangedRule())
+    runner = RunFromText(collection)
+    results = runner.run_role_tasks_main(FAIL_TASKS)
+    assert len(results) == 5
diff --git a/test/schemas/.mocharc.json b/test/schemas/.mocharc.json
new file mode 100644
index 0000000..0148197
--- /dev/null
+++ b/test/schemas/.mocharc.json
@@ -0,0 +1,7 @@
+{
+  "colors": true,
+  "extension": ["ts"],
+  "require": "ts-node/register",
+  "slow": "500",
+  "spec": "src/**/*.spec.ts"
+}
diff --git a/test/schemas/data/licenses.json b/test/schemas/data/licenses.json
new file mode 100644
index 0000000..9c7cdbc
--- /dev/null
+++ b/test/schemas/data/licenses.json
@@ -0,0 +1,5766 @@
+{
+  "licenseListVersion": "3.13",
+  "licenses": [
+    {
+      "reference": "https://spdx.org/licenses/bzip2-1.0.6.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/bzip2-1.0.6.json",
+      "referenceNumber": 0,
+      "name": "bzip2 and libbzip2 License v1.0.6",
+      "licenseId": "bzip2-1.0.6",
+      "seeAlso": [
+        "https://sourceware.org/git/?p\u003dbzip2.git;a\u003dblob;f\u003dLICENSE;hb\u003dbzip2-1.0.6",
+        "http://bzip.org/1.0.5/bzip2-manual-1.0.5.html"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/Glulxe.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/Glulxe.json",
+      "referenceNumber": 1,
+      "name": "Glulxe License",
+      "licenseId": "Glulxe",
+      "seeAlso": [
+        "https://fedoraproject.org/wiki/Licensing/Glulxe"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/Parity-7.0.0.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/Parity-7.0.0.json",
+      "referenceNumber": 2,
+      "name": "The Parity Public License 7.0.0",
+      "licenseId": "Parity-7.0.0",
+      "seeAlso": [
+        "https://paritylicense.com/versions/7.0.0.html"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/OML.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/OML.json",
+      "referenceNumber": 3,
+      "name": "Open Market License",
+      "licenseId": "OML",
+      "seeAlso": [
+        "https://fedoraproject.org/wiki/Licensing/Open_Market_License"
+      ],
+      "isOsiApproved": false
+    },
+    {
+      "reference": "https://spdx.org/licenses/UCL-1.0.html",
+      "isDeprecatedLicenseId": false,
+      "detailsUrl": "https://spdx.org/licenses/UCL-1.0.json",
+      "referenceNumber": 4,
+      "name": "Upstream Compatibility License v1.0",
+      "licenseId": "UCL-1.0",
+      "seeAlso":
[ + "https://opensource.org/licenses/UCL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/UPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/UPL-1.0.json", + "referenceNumber": 5, + "name": "Universal Permissive License v1.0", + "licenseId": "UPL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/UPL" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/BSD-Protection.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-Protection.json", + "referenceNumber": 6, + "name": "BSD Protection License", + "licenseId": "BSD-Protection", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/BSD_Protection_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OCLC-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OCLC-2.0.json", + "referenceNumber": 7, + "name": "OCLC Research Public License 2.0", + "licenseId": "OCLC-2.0", + "seeAlso": [ + "http://www.oclc.org/research/activities/software/license/v2final.htm", + "https://opensource.org/licenses/OCLC-2.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/eCos-2.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/eCos-2.0.json", + "referenceNumber": 8, + "name": "eCos license version 2.0", + "licenseId": "eCos-2.0", + "seeAlso": [ + "https://www.gnu.org/licenses/ecos-license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Multics.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Multics.json", + "referenceNumber": 9, + "name": "Multics License", + "licenseId": "Multics", + "seeAlso": [ + "https://opensource.org/licenses/Multics" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/IPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/IPL-1.0.json", + "referenceNumber": 10, + "name": "IBM Public License v1.0", + "licenseId": "IPL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/IPL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/IPA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/IPA.json", + "referenceNumber": 11, + "name": "IPA Font License", + "licenseId": "IPA", + "seeAlso": [ + "https://opensource.org/licenses/IPA" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/eGenix.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/eGenix.json", + "referenceNumber": 12, + "name": "eGenix.com Public License 1.1.0", + "licenseId": "eGenix", + "seeAlso": [ + "http://www.egenix.com/products/eGenix.com-Public-License-1.1.0.pdf", + "https://fedoraproject.org/wiki/Licensing/eGenix.com_Public_License_1.1.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Glide.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Glide.json", + "referenceNumber": 13, + "name": "3dfx Glide License", + "licenseId": "Glide", + "seeAlso": [ + "http://www.users.on.net/~triforce/glidexp/COPYING.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Entessa.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Entessa.json", + "referenceNumber": 
14, + "name": "Entessa Public License v1.0", + "licenseId": "Entessa", + "seeAlso": [ + "https://opensource.org/licenses/Entessa" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/FSFUL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FSFUL.json", + "referenceNumber": 15, + "name": "FSF Unlimited License", + "licenseId": "FSFUL", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/FSF_Unlimited_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Nunit.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/Nunit.json", + "referenceNumber": 16, + "name": "Nunit License", + "licenseId": "Nunit", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Nunit" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MPL-2.0-no-copyleft-exception.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MPL-2.0-no-copyleft-exception.json", + "referenceNumber": 17, + "name": "Mozilla Public License 2.0 (no copyleft exception)", + "licenseId": "MPL-2.0-no-copyleft-exception", + "seeAlso": [ + "http://www.mozilla.org/MPL/2.0/", + "https://opensource.org/licenses/MPL-2.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/libpng-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/libpng-2.0.json", + "referenceNumber": 18, + "name": "PNG Reference Library version 2", + "licenseId": "libpng-2.0", + "seeAlso": [ + "http://www.libpng.org/pub/png/src/libpng-LICENSE.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.2.1.json", + "referenceNumber": 19, + "name": "Open LDAP Public License v2.2.1", + "licenseId": "OLDAP-2.2.1", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d4bc786f34b50aa301be6f5600f58a980070f481e" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/curl.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/curl.json", + "referenceNumber": 20, + "name": "curl License", + "licenseId": "curl", + "seeAlso": [ + "https://github.com/bagder/curl/blob/master/COPYING" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ANTLR-PD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ANTLR-PD.json", + "referenceNumber": 21, + "name": "ANTLR Software Rights Notice", + "licenseId": "ANTLR-PD", + "seeAlso": [ + "http://www.antlr2.org/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-2.0.json", + "referenceNumber": 22, + "name": "Creative Commons Attribution Share Alike 2.0 Generic", + "licenseId": "CC-BY-SA-2.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/2.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LiLiQ-P-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LiLiQ-P-1.1.json", + "referenceNumber": 23, + "name": "Licence Libre du Québec – Permissive version 1.1", + "licenseId": "LiLiQ-P-1.1", + "seeAlso": [ + "https://forge.gouv.qc.ca/licence/fr/liliq-v1-1/", + 
"http://opensource.org/licenses/LiLiQ-P-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/TCP-wrappers.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TCP-wrappers.json", + "referenceNumber": 24, + "name": "TCP Wrappers License", + "licenseId": "TCP-wrappers", + "seeAlso": [ + "http://rc.quest.com/topics/openssh/license.php#tcpwrappers" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Unicode-DFS-2016.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Unicode-DFS-2016.json", + "referenceNumber": 25, + "name": "Unicode License Agreement - Data Files and Software (2016)", + "licenseId": "Unicode-DFS-2016", + "seeAlso": [ + "http://www.unicode.org/copyright.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/ODbL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ODbL-1.0.json", + "referenceNumber": 26, + "name": "Open Data Commons Open Database License v1.0", + "licenseId": "ODbL-1.0", + "seeAlso": [ + "http://www.opendatacommons.org/licenses/odbl/1.0/", + "https://opendatacommons.org/licenses/odbl/1-0/" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LPPL-1.3a.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPPL-1.3a.json", + "referenceNumber": 27, + "name": "LaTeX Project Public License v1.3a", + "licenseId": "LPPL-1.3a", + "seeAlso": [ + "http://www.latex-project.org/lppl/lppl-1-3a.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CERN-OHL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CERN-OHL-1.2.json", + "referenceNumber": 28, + "name": "CERN Open Hardware Licence v1.2", + "licenseId": "CERN-OHL-1.2", + "seeAlso": [ + "https://www.ohwr.org/project/licenses/wikis/cern-ohl-v1.2" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ADSL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ADSL.json", + "referenceNumber": 29, + "name": "Amazon Digital Services License", + "licenseId": "ADSL", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/AmazonDigitalServicesLicense" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CDDL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CDDL-1.0.json", + "referenceNumber": 30, + "name": "Common Development and Distribution License 1.0", + "licenseId": "CDDL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/cddl1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Motosoto.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Motosoto.json", + "referenceNumber": 31, + "name": "Motosoto License", + "licenseId": "Motosoto", + "seeAlso": [ + "https://opensource.org/licenses/Motosoto" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/BUSL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BUSL-1.1.json", + "referenceNumber": 32, + "name": "Business Source License 1.1", + "licenseId": "BUSL-1.1", + "seeAlso": [ + "https://mariadb.com/bsl11/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OGL-UK-1.0.html", + "isDeprecatedLicenseId": false, + 
"detailsUrl": "https://spdx.org/licenses/OGL-UK-1.0.json", + "referenceNumber": 33, + "name": "Open Government Licence v1.0", + "licenseId": "OGL-UK-1.0", + "seeAlso": [ + "http://www.nationalarchives.gov.uk/doc/open-government-licence/version/1/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/xinetd.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/xinetd.json", + "referenceNumber": 34, + "name": "xinetd License", + "licenseId": "xinetd", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Xinetd_License" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Imlib2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Imlib2.json", + "referenceNumber": 35, + "name": "Imlib2 License", + "licenseId": "Imlib2", + "seeAlso": [ + "http://trac.enlightenment.org/e/browser/trunk/imlib2/COPYING", + "https://git.enlightenment.org/legacy/imlib2.git/tree/COPYING" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SNIA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SNIA.json", + "referenceNumber": 36, + "name": "SNIA Public License 1.1", + "licenseId": "SNIA", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/SNIA_Public_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OGTSL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OGTSL.json", + "referenceNumber": 37, + "name": "Open Group Test Suite License", + "licenseId": "OGTSL", + "seeAlso": [ + "http://www.opengroup.org/testing/downloads/The_Open_Group_TSL.txt", + "https://opensource.org/licenses/OGTSL" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/TMate.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TMate.json", + "referenceNumber": 38, + "name": "TMate Open Source License", + "licenseId": "TMate", + "seeAlso": [ + "http://svnkit.com/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OCCT-PL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OCCT-PL.json", + "referenceNumber": 39, + "name": "Open CASCADE Technology Public License", + "licenseId": "OCCT-PL", + "seeAlso": [ + "http://www.opencascade.com/content/occt-public-license" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-1.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GPL-1.0-or-later.json", + "referenceNumber": 40, + "name": "GNU General Public License v1.0 or later", + "licenseId": "GPL-1.0-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/YPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/YPL-1.1.json", + "referenceNumber": 41, + "name": "Yahoo! 
Public License v1.1", + "licenseId": "YPL-1.1", + "seeAlso": [ + "http://www.zimbra.com/license/yahoo_public_license_1.1.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CECILL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CECILL-2.0.json", + "referenceNumber": 42, + "name": "CeCILL Free Software License Agreement v2.0", + "licenseId": "CECILL-2.0", + "seeAlso": [ + "http://www.cecill.info/licences/Licence_CeCILL_V2-en.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/PHP-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PHP-3.0.json", + "referenceNumber": 43, + "name": "PHP License v3.0", + "licenseId": "PHP-3.0", + "seeAlso": [ + "http://www.php.net/license/3_0.txt", + "https://opensource.org/licenses/PHP-3.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/BlueOak-1.0.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BlueOak-1.0.0.json", + "referenceNumber": 44, + "name": "Blue Oak Model License 1.0.0", + "licenseId": "BlueOak-1.0.0", + "seeAlso": [ + "https://blueoakcouncil.org/license/1.0.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Zimbra-1.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Zimbra-1.3.json", + "referenceNumber": 45, + "name": "Zimbra Public License v1.3", + "licenseId": "Zimbra-1.3", + "seeAlso": [ + "http://web.archive.org/web/20100302225219/http://www.zimbra.com/license/zimbra-public-license-1-3.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OGC-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OGC-1.0.json", + "referenceNumber": 46, + "name": "OGC Software License, Version 1.0", + "licenseId": "OGC-1.0", + "seeAlso": [ + "https://www.ogc.org/ogc/software/1.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NASA-1.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NASA-1.3.json", + "referenceNumber": 47, + "name": "NASA Open Source Agreement 1.3", + "licenseId": "NASA-1.3", + "seeAlso": [ + "http://ti.arc.nasa.gov/opensource/nosa/", + "https://opensource.org/licenses/NASA-1.3" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/SPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SPL-1.0.json", + "referenceNumber": 48, + "name": "Sun Public License v1.0", + "licenseId": "SPL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/SPL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Intel-ACPI.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Intel-ACPI.json", + "referenceNumber": 49, + "name": "Intel ACPI Software License Agreement", + "licenseId": "Intel-ACPI", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Intel_ACPI_Software_License_Agreement" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SISSL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SISSL-1.2.json", + "referenceNumber": 50, + "name": "Sun Industry Standards Source License v1.2", + "licenseId": "SISSL-1.2", + "seeAlso": [ + 
"http://gridscheduler.sourceforge.net/Gridengine_SISSL_license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OGL-Canada-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OGL-Canada-2.0.json", + "referenceNumber": 51, + "name": "Open Government Licence - Canada", + "licenseId": "OGL-Canada-2.0", + "seeAlso": [ + "https://open.canada.ca/en/open-government-licence-canada" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-3.0-US.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-3.0-US.json", + "referenceNumber": 52, + "name": "Creative Commons Attribution 3.0 United States", + "licenseId": "CC-BY-3.0-US", + "seeAlso": [ + "https://creativecommons.org/licenses/by/3.0/us/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/copyleft-next-0.3.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/copyleft-next-0.3.1.json", + "referenceNumber": 53, + "name": "copyleft-next 0.3.1", + "licenseId": "copyleft-next-0.3.1", + "seeAlso": [ + "https://github.com/copyleft-next/copyleft-next/blob/master/Releases/copyleft-next-0.3.1" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1-invariants-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-invariants-or-later.json", + "referenceNumber": 54, + "name": "GNU Free Documentation License v1.1 or later - invariants", + "licenseId": "GFDL-1.1-invariants-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GL2PS.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GL2PS.json", + "referenceNumber": 55, + "name": "GL2PS License", + "licenseId": "GL2PS", + "seeAlso": [ + "http://www.geuz.org/gl2ps/COPYING.GL2PS" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MS-PL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MS-PL.json", + "referenceNumber": 56, + "name": "Microsoft Public License", + "licenseId": "MS-PL", + "seeAlso": [ + "http://www.microsoft.com/opensource/licenses.mspx", + "https://opensource.org/licenses/MS-PL" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SCEA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SCEA.json", + "referenceNumber": 57, + "name": "SCEA Shared Source License", + "licenseId": "SCEA", + "seeAlso": [ + "http://research.scea.com/scea_shared_source_license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-ND-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-2.5.json", + "referenceNumber": 58, + "name": "Creative Commons Attribution No Derivatives 2.5 Generic", + "licenseId": "CC-BY-ND-2.5", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd/2.5/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SSPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SSPL-1.0.json", + "referenceNumber": 59, + "name": "Server Side Public License, v 1", + "licenseId": "SSPL-1.0", + "seeAlso": [ + "https://www.mongodb.com/licensing/server-side-public-license" + ], + 
"isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Spencer-86.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Spencer-86.json", + "referenceNumber": 60, + "name": "Spencer License 86", + "licenseId": "Spencer-86", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Henry_Spencer_Reg-Ex_Library_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LPPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPPL-1.0.json", + "referenceNumber": 61, + "name": "LaTeX Project Public License v1.0", + "licenseId": "LPPL-1.0", + "seeAlso": [ + "http://www.latex-project.org/lppl/lppl-1-0.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-3.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GPL-3.0-only.json", + "referenceNumber": 62, + "name": "GNU General Public License v3.0 only", + "licenseId": "GPL-3.0-only", + "seeAlso": [ + "https://www.gnu.org/licenses/gpl-3.0-standalone.html", + "https://opensource.org/licenses/GPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-with-autoconf-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-autoconf-exception.json", + "referenceNumber": 63, + "name": "GNU General Public License v2.0 w/Autoconf exception", + "licenseId": "GPL-2.0-with-autoconf-exception", + "seeAlso": [ + "http://ac-archive.sourceforge.net/doc/copyright.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Giftware.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Giftware.json", + "referenceNumber": 64, + "name": "Giftware License", + "licenseId": "Giftware", + "seeAlso": [ + "http://liballeg.org/license.html#allegro-4-the-giftware-license" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-ND-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-3.0.json", + "referenceNumber": 65, + "name": "Creative Commons Attribution Non Commercial No Derivatives 3.0 Unported", + "licenseId": "CC-BY-NC-ND-3.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-nd/3.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CNRI-Python.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CNRI-Python.json", + "referenceNumber": 66, + "name": "CNRI Python License", + "licenseId": "CNRI-Python", + "seeAlso": [ + "https://opensource.org/licenses/CNRI-Python" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2-no-invariants-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-no-invariants-or-later.json", + "referenceNumber": 67, + "name": "GNU Free Documentation License v1.2 or later - no invariants", + "licenseId": "GFDL-1.2-no-invariants-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Afmparse.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Afmparse.json", + "referenceNumber": 68, + "name": "Afmparse License", + "licenseId": "Afmparse", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Afmparse" + ], + 
"isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-3-Clause-LBNL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-LBNL.json", + "referenceNumber": 69, + "name": "Lawrence Berkeley National Labs BSD variant license", + "licenseId": "BSD-3-Clause-LBNL", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/LBNLBSD" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/NCGL-UK-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NCGL-UK-2.0.json", + "referenceNumber": 70, + "name": "Non-Commercial Government Licence", + "licenseId": "NCGL-UK-2.0", + "seeAlso": [ + "http://www.nationalarchives.gov.uk/doc/non-commercial-government-licence/version/2/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-1.0+.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-1.0+.json", + "referenceNumber": 71, + "name": "GNU General Public License v1.0 or later", + "licenseId": "GPL-1.0+", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/PHP-3.01.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PHP-3.01.json", + "referenceNumber": 72, + "name": "PHP License v3.01", + "licenseId": "PHP-3.01", + "seeAlso": [ + "http://www.php.net/license/3_01.txt" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Leptonica.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Leptonica.json", + "referenceNumber": 73, + "name": "Leptonica License", + "licenseId": "Leptonica", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Leptonica" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/bzip2-1.0.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/bzip2-1.0.5.json", + "referenceNumber": 74, + "name": "bzip2 and libbzip2 License v1.0.5", + "licenseId": "bzip2-1.0.5", + "seeAlso": [ + "https://sourceware.org/bzip2/1.0.5/bzip2-manual-1.0.5.html", + "http://bzip.org/1.0.5/bzip2-manual-1.0.5.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NIST-PD-fallback.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NIST-PD-fallback.json", + "referenceNumber": 75, + "name": "NIST Public Domain Notice with license fallback", + "licenseId": "NIST-PD-fallback", + "seeAlso": [ + "https://github.com/usnistgov/jsip/blob/59700e6926cbe96c5cdae897d9a7d2656b42abe3/LICENSE", + "https://github.com/usnistgov/fipy/blob/86aaa5c2ba2c6f1be19593c5986071cf6568cc34/LICENSE.rst" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OSL-1.0.json", + "referenceNumber": 76, + "name": "Open Software License 1.0", + "licenseId": "OSL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/OSL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OFL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFL-1.1.json", + "referenceNumber": 77, + "name": "SIL Open Font License 1.1", + "licenseId": "OFL-1.1", + "seeAlso": [ + "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL_web", + 
"https://opensource.org/licenses/OFL-1.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/JasPer-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/JasPer-2.0.json", + "referenceNumber": 78, + "name": "JasPer License", + "licenseId": "JasPer-2.0", + "seeAlso": [ + "http://www.ece.uvic.ca/~mdadams/jasper/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Naumen.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Naumen.json", + "referenceNumber": 79, + "name": "Naumen Public License", + "licenseId": "Naumen", + "seeAlso": [ + "https://opensource.org/licenses/Naumen" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/AGPL-1.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AGPL-1.0-only.json", + "referenceNumber": 80, + "name": "Affero General Public License v1.0 only", + "licenseId": "AGPL-1.0-only", + "seeAlso": [ + "http://www.affero.org/oagpl.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/C-UDA-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/C-UDA-1.0.json", + "referenceNumber": 81, + "name": "Computational Use of Data Agreement v1.0", + "licenseId": "C-UDA-1.0", + "seeAlso": [ + "https://github.com/microsoft/Computational-Use-of-Data-Agreement/blob/master/C-UDA-1.0.md", + "https://cdla.dev/computational-use-of-data-agreement-v1-0/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MIT.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT.json", + "referenceNumber": 82, + "name": "MIT License", + "licenseId": "MIT", + "seeAlso": [ + "https://opensource.org/licenses/MIT" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/TCL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TCL.json", + "referenceNumber": 83, + "name": "TCL/TK License", + "licenseId": "TCL", + "seeAlso": [ + "http://www.tcl.tk/software/tcltk/license.html", + "https://fedoraproject.org/wiki/Licensing/TCL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LGPL-3.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPL-3.0-only.json", + "referenceNumber": 84, + "name": "GNU Lesser General Public License v3.0 only", + "licenseId": "LGPL-3.0-only", + "seeAlso": [ + "https://www.gnu.org/licenses/lgpl-3.0-standalone.html", + "https://opensource.org/licenses/LGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/ECL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ECL-1.0.json", + "referenceNumber": 85, + "name": "Educational Community License v1.0", + "licenseId": "ECL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/ECL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/MPL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MPL-2.0.json", + "referenceNumber": 86, + "name": "Mozilla Public License 2.0", + "licenseId": "MPL-2.0", + "seeAlso": [ + "http://www.mozilla.org/MPL/2.0/", + "https://opensource.org/licenses/MPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": 
"https://spdx.org/licenses/CC-BY-NC-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-1.0.json", + "referenceNumber": 87, + "name": "Creative Commons Attribution Non Commercial 1.0 Generic", + "licenseId": "CC-BY-NC-1.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc/1.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-ND-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-2.5.json", + "referenceNumber": 88, + "name": "Creative Commons Attribution Non Commercial No Derivatives 2.5 Generic", + "licenseId": "CC-BY-NC-ND-2.5", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-nd/2.5/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LPPL-1.3c.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPPL-1.3c.json", + "referenceNumber": 89, + "name": "LaTeX Project Public License v1.3c", + "licenseId": "LPPL-1.3c", + "seeAlso": [ + "http://www.latex-project.org/lppl/lppl-1-3c.txt", + "https://opensource.org/licenses/LPPL-1.3c" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/JSON.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/JSON.json", + "referenceNumber": 90, + "name": "JSON License", + "licenseId": "JSON", + "seeAlso": [ + "http://www.json.org/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NBPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NBPL-1.0.json", + "referenceNumber": 91, + "name": "Net Boolean Public License v1", + "licenseId": "NBPL-1.0", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d37b4b3f6cc4bf34e1d3dec61e69914b9819d8894" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CAL-1.0-Combined-Work-Exception.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CAL-1.0-Combined-Work-Exception.json", + "referenceNumber": 92, + "name": "Cryptographic Autonomy License 1.0 (Combined Work Exception)", + "licenseId": "CAL-1.0-Combined-Work-Exception", + "seeAlso": [ + "http://cryptographicautonomylicense.com/license-text.html", + "https://opensource.org/licenses/CAL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Unlicense.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Unlicense.json", + "referenceNumber": 93, + "name": "The Unlicense", + "licenseId": "Unlicense", + "seeAlso": [ + "https://unlicense.org/" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CNRI-Python-GPL-Compatible.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CNRI-Python-GPL-Compatible.json", + "referenceNumber": 94, + "name": "CNRI Python Open Source GPL Compatible License Agreement", + "licenseId": "CNRI-Python-GPL-Compatible", + "seeAlso": [ + "http://www.python.org/download/releases/1.6.1/download_win/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TU-Berlin-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TU-Berlin-2.0.json", + "referenceNumber": 95, + "name": "Technische Universitaet Berlin License 2.0", + "licenseId": "TU-Berlin-2.0", + "seeAlso": [ + 
"https://github.com/CorsixTH/deps/blob/fd339a9f526d1d9c9f01ccf39e438a015da50035/licences/libgsm.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NLPL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NLPL.json", + "referenceNumber": 96, + "name": "No Limit Public License", + "licenseId": "NLPL", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/NLPL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LGPL-3.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPL-3.0-or-later.json", + "referenceNumber": 97, + "name": "GNU Lesser General Public License v3.0 or later", + "licenseId": "LGPL-3.0-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/lgpl-3.0-standalone.html", + "https://opensource.org/licenses/LGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Beerware.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Beerware.json", + "referenceNumber": 98, + "name": "Beerware License", + "licenseId": "Beerware", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Beerware", + "https://people.freebsd.org/~phk/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NGPL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NGPL.json", + "referenceNumber": 99, + "name": "Nethack General Public License", + "licenseId": "NGPL", + "seeAlso": [ + "https://opensource.org/licenses/NGPL" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/ZPL-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ZPL-2.1.json", + "referenceNumber": 100, + "name": "Zope Public License 2.1", + "licenseId": "ZPL-2.1", + "seeAlso": [ + "http://old.zope.org/Resources/ZPL/" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Saxpath.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Saxpath.json", + "referenceNumber": 101, + "name": "Saxpath License", + "licenseId": "Saxpath", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Saxpath_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-2.0-UK.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-2.0-UK.json", + "referenceNumber": 102, + "name": "Creative Commons Attribution Share Alike 2.0 England and Wales", + "licenseId": "CC-BY-SA-2.0-UK", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/2.0/uk/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CECILL-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CECILL-2.1.json", + "referenceNumber": 103, + "name": "CeCILL Free Software License Agreement v2.1", + "licenseId": "CECILL-2.1", + "seeAlso": [ + "http://www.cecill.info/licences/Licence_CeCILL_V2.1-en.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/XFree86-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/XFree86-1.1.json", + "referenceNumber": 104, + "name": "XFree86 License 1.1", + "licenseId": "XFree86-1.1", + "seeAlso": [ + "http://www.xfree86.org/current/LICENSE4.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": 
"https://spdx.org/licenses/IBM-pibs.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/IBM-pibs.json", + "referenceNumber": 105, + "name": "IBM PowerPC Initialization and Boot Software", + "licenseId": "IBM-pibs", + "seeAlso": [ + "http://git.denx.de/?p\u003du-boot.git;a\u003dblob;f\u003darch/powerpc/cpu/ppc4xx/miiphy.c;h\u003d297155fdafa064b955e53e9832de93bfb0cfb85b;hb\u003d9fab4bf4cc077c21e43941866f3f2c196f28670d" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Zlib.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Zlib.json", + "referenceNumber": 106, + "name": "zlib License", + "licenseId": "Zlib", + "seeAlso": [ + "http://www.zlib.net/zlib_license.html", + "https://opensource.org/licenses/Zlib" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/StandardML-NJ.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/StandardML-NJ.json", + "referenceNumber": 107, + "name": "Standard ML of New Jersey License", + "licenseId": "StandardML-NJ", + "seeAlso": [ + "http://www.smlnj.org//license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/RPSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/RPSL-1.0.json", + "referenceNumber": 108, + "name": "RealNetworks Public Source License v1.0", + "licenseId": "RPSL-1.0", + "seeAlso": [ + "https://helixcommunity.org/content/rpsl", + "https://opensource.org/licenses/RPSL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CECILL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CECILL-1.0.json", + "referenceNumber": 109, + "name": "CeCILL Free Software License Agreement v1.0", + "licenseId": "CECILL-1.0", + "seeAlso": [ + "http://www.cecill.info/licences/Licence_CeCILL_V1-fr.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OGL-UK-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OGL-UK-3.0.json", + "referenceNumber": 110, + "name": "Open Government Licence v3.0", + "licenseId": "OGL-UK-3.0", + "seeAlso": [ + "http://www.nationalarchives.gov.uk/doc/open-government-licence/version/3/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-4-Clause-Shortened.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-4-Clause-Shortened.json", + "referenceNumber": 111, + "name": "BSD 4 Clause Shortened", + "licenseId": "BSD-4-Clause-Shortened", + "seeAlso": [ + "https://metadata.ftp-master.debian.org/changelogs//main/a/arpwatch/arpwatch_2.1a15-7_copyright" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Watcom-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Watcom-1.0.json", + "referenceNumber": 112, + "name": "Sybase Open Watcom Public License 1.0", + "licenseId": "Watcom-1.0", + "seeAlso": [ + "https://opensource.org/licenses/Watcom-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Wsuipa.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Wsuipa.json", + "referenceNumber": 113, + "name": "Wsuipa License", + "licenseId": "Wsuipa", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Wsuipa" + ], + "isOsiApproved": false + }, + { + 
"reference": "https://spdx.org/licenses/TU-Berlin-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TU-Berlin-1.0.json", + "referenceNumber": 114, + "name": "Technische Universitaet Berlin License 1.0", + "licenseId": "TU-Berlin-1.0", + "seeAlso": [ + "https://github.com/swh/ladspa/blob/7bf6f3799fdba70fda297c2d8fd9f526803d9680/gsm/COPYRIGHT" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Latex2e.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Latex2e.json", + "referenceNumber": 115, + "name": "Latex2e License", + "licenseId": "Latex2e", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Latex2e" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CECILL-B.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CECILL-B.json", + "referenceNumber": 116, + "name": "CeCILL-B Free Software License Agreement", + "licenseId": "CECILL-B", + "seeAlso": [ + "http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/EUPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EUPL-1.0.json", + "referenceNumber": 117, + "name": "European Union Public License 1.0", + "licenseId": "EUPL-1.0", + "seeAlso": [ + "http://ec.europa.eu/idabc/en/document/7330.html", + "http://ec.europa.eu/idabc/servlets/Doc027f.pdf?id\u003d31096" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-or-later.json", + "referenceNumber": 118, + "name": "GNU Free Documentation License v1.2 or later", + "licenseId": "GFDL-1.2-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CPL-1.0.json", + "referenceNumber": 119, + "name": "Common Public License 1.0", + "licenseId": "CPL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/CPL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CC-BY-ND-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-3.0.json", + "referenceNumber": 120, + "name": "Creative Commons Attribution No Derivatives 3.0 Unported", + "licenseId": "CC-BY-ND-3.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd/3.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NTP.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NTP.json", + "referenceNumber": 121, + "name": "NTP License", + "licenseId": "NTP", + "seeAlso": [ + "https://opensource.org/licenses/NTP" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/W3C-19980720.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/W3C-19980720.json", + "referenceNumber": 122, + "name": "W3C Software Notice and License (1998-07-20)", + "licenseId": "W3C-19980720", + "seeAlso": [ + "http://www.w3.org/Consortium/Legal/copyright-software-19980720.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3-only.html", + 
"isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-only.json", + "referenceNumber": 123, + "name": "GNU Free Documentation License v1.3 only", + "licenseId": "GFDL-1.3-only", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-4.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-4.0.json", + "referenceNumber": 124, + "name": "Creative Commons Attribution Share Alike 4.0 International", + "licenseId": "CC-BY-SA-4.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/4.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/EUPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EUPL-1.1.json", + "referenceNumber": 125, + "name": "European Union Public License 1.1", + "licenseId": "EUPL-1.1", + "seeAlso": [ + "https://joinup.ec.europa.eu/software/page/eupl/licence-eupl", + "https://joinup.ec.europa.eu/sites/default/files/custom-page/attachment/eupl1.1.-licence-en_0.pdf", + "https://opensource.org/licenses/EUPL-1.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1-no-invariants-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-no-invariants-only.json", + "referenceNumber": 126, + "name": "GNU Free Documentation License v1.1 only - no invariants", + "licenseId": "GFDL-1.1-no-invariants-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/JPNIC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/JPNIC.json", + "referenceNumber": 127, + "name": "Japan Network Information Center License", + "licenseId": "JPNIC", + "seeAlso": [ + "https://gitlab.isc.org/isc-projects/bind9/blob/master/COPYRIGHT#L366" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AMPAS.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AMPAS.json", + "referenceNumber": 128, + "name": "Academy of Motion Picture Arts and Sciences BSD", + "licenseId": "AMPAS", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/BSD#AMPASBSD" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-3-Clause.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause.json", + "referenceNumber": 129, + "name": "BSD 3-Clause \"New\" or \"Revised\" License", + "licenseId": "BSD-3-Clause", + "seeAlso": [ + "https://opensource.org/licenses/BSD-3-Clause" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/MIT-0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-0.json", + "referenceNumber": 130, + "name": "MIT No Attribution", + "licenseId": "MIT-0", + "seeAlso": [ + "https://github.com/aws/mit-0", + "https://romanrm.net/mit-zero", + "https://github.com/awsdocs/aws-cloud9-user-guide/blob/master/LICENSE-SAMPLECODE" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Intel.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Intel.json", + "referenceNumber": 131, + "name": "Intel Open Source License", + "licenseId": "Intel", + "seeAlso": 
[ + "https://opensource.org/licenses/Intel" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/O-UDA-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/O-UDA-1.0.json", + "referenceNumber": 132, + "name": "Open Use of Data Agreement v1.0", + "licenseId": "O-UDA-1.0", + "seeAlso": [ + "https://github.com/microsoft/Open-Use-of-Data-Agreement/blob/v1.0/O-UDA-1.0.md", + "https://cdla.dev/open-use-of-data-agreement-v1-0/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NPL-1.0.json", + "referenceNumber": 133, + "name": "Netscape Public License v1.0", + "licenseId": "NPL-1.0", + "seeAlso": [ + "http://www.mozilla.org/MPL/NPL/1.0/" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-2.5.json", + "referenceNumber": 134, + "name": "Creative Commons Attribution Non Commercial 2.5 Generic", + "licenseId": "CC-BY-NC-2.5", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc/2.5/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Mup.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Mup.json", + "referenceNumber": 135, + "name": "Mup License", + "licenseId": "Mup", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Mup" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Newsletr.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Newsletr.json", + "referenceNumber": 136, + "name": "Newsletr License", + "licenseId": "Newsletr", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Newsletr" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/PDDL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PDDL-1.0.json", + "referenceNumber": 137, + "name": "Open Data Commons Public Domain Dedication \u0026 License 1.0", + "licenseId": "PDDL-1.0", + "seeAlso": [ + "http://opendatacommons.org/licenses/pddl/1.0/", + "https://opendatacommons.org/licenses/pddl/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SMLNJ.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SMLNJ.json", + "referenceNumber": 138, + "name": "Standard ML of New Jersey License", + "licenseId": "SMLNJ", + "seeAlso": [ + "https://www.smlnj.org/license.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/BSD-1-Clause.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-1-Clause.json", + "referenceNumber": 139, + "name": "BSD 1-Clause License", + "licenseId": "BSD-1-Clause", + "seeAlso": [ + "https://svnweb.freebsd.org/base/head/include/ifaddrs.h?revision\u003d326823" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/SimPL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SimPL-2.0.json", + "referenceNumber": 140, + "name": "Simple Public License 2.0", + "licenseId": "SimPL-2.0", + "seeAlso": [ + "https://opensource.org/licenses/SimPL-2.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/OLDAP-1.2.html", + 
"isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-1.2.json", + "referenceNumber": 141, + "name": "Open LDAP Public License v1.2", + "licenseId": "OLDAP-1.2", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d42b0383c50c299977b5893ee695cf4e486fb0dc7" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Xnet.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Xnet.json", + "referenceNumber": 142, + "name": "X.Net License", + "licenseId": "Xnet", + "seeAlso": [ + "https://opensource.org/licenses/Xnet" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/BSD-2-Clause.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause.json", + "referenceNumber": 143, + "name": "BSD 2-Clause \"Simplified\" License", + "licenseId": "BSD-2-Clause", + "seeAlso": [ + "https://opensource.org/licenses/BSD-2-Clause" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/AML.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AML.json", + "referenceNumber": 144, + "name": "Apple MIT License", + "licenseId": "AML", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Apple_MIT_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-only.json", + "referenceNumber": 145, + "name": "GNU Free Documentation License v1.2 only", + "licenseId": "GFDL-1.2-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Info-ZIP.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Info-ZIP.json", + "referenceNumber": 146, + "name": "Info-ZIP License", + "licenseId": "Info-ZIP", + "seeAlso": [ + "http://www.info-zip.org/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/DSDP.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/DSDP.json", + "referenceNumber": 147, + "name": "DSDP License", + "licenseId": "DSDP", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/DSDP" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AGPL-1.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/AGPL-1.0.json", + "referenceNumber": 148, + "name": "Affero General Public License v1.0", + "licenseId": "AGPL-1.0", + "seeAlso": [ + "http://www.affero.org/oagpl.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/BSD-4-Clause-UC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-4-Clause-UC.json", + "referenceNumber": 149, + "name": "BSD-4-Clause (University of California-Specific)", + "licenseId": "BSD-4-Clause-UC", + "seeAlso": [ + "http://www.freebsd.org/copyright/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.1-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.1-only.json", + "referenceNumber": 150, + "name": "GNU Lesser General Public License v2.1 only", + "licenseId": "LGPL-2.1-only", + "seeAlso": [ + 
"https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html", + "https://opensource.org/licenses/LGPL-2.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OFL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFL-1.0.json", + "referenceNumber": 151, + "name": "SIL Open Font License 1.0", + "licenseId": "OFL-1.0", + "seeAlso": [ + "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL10_web" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CDL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CDL-1.0.json", + "referenceNumber": 152, + "name": "Common Documentation License 1.0", + "licenseId": "CDL-1.0", + "seeAlso": [ + "http://www.opensource.apple.com/cdl/", + "https://fedoraproject.org/wiki/Licensing/Common_Documentation_License", + "https://www.gnu.org/licenses/license-list.html#ACDL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LAL-1.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LAL-1.3.json", + "referenceNumber": 153, + "name": "Licence Art Libre 1.3", + "licenseId": "LAL-1.3", + "seeAlso": [ + "https://artlibre.org/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Sendmail.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Sendmail.json", + "referenceNumber": 154, + "name": "Sendmail License", + "licenseId": "Sendmail", + "seeAlso": [ + "http://www.sendmail.com/pdfs/open_source/sendmail_license.pdf", + "https://web.archive.org/web/20160322142305/https://www.sendmail.com/pdfs/open_source/sendmail_license.pdf" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OGDL-Taiwan-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OGDL-Taiwan-1.0.json", + "referenceNumber": 155, + "name": "Taiwan Open Government Data License, version 1.0", + "licenseId": "OGDL-Taiwan-1.0", + "seeAlso": [ + "https://data.gov.tw/license" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Zimbra-1.4.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Zimbra-1.4.json", + "referenceNumber": 156, + "name": "Zimbra Public License v1.4", + "licenseId": "Zimbra-1.4", + "seeAlso": [ + "http://www.zimbra.com/legal/zimbra-public-license-1-4" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Borceux.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Borceux.json", + "referenceNumber": 157, + "name": "Borceux license", + "licenseId": "Borceux", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Borceux" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OSL-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OSL-3.0.json", + "referenceNumber": 158, + "name": "Open Software License 3.0", + "licenseId": "OSL-3.0", + "seeAlso": [ + "https://web.archive.org/web/20120101081418/http://rosenlaw.com:80/OSL3.0.htm", + "https://opensource.org/licenses/OSL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/AMDPLPA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AMDPLPA.json", + "referenceNumber": 159, + "name": "AMD\u0027s plpa_map.c License", + "licenseId": "AMDPLPA", + 
"seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/AMD_plpa_map_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-3.0.json", + "referenceNumber": 160, + "name": "Creative Commons Attribution Non Commercial Share Alike 3.0 Unported", + "licenseId": "CC-BY-NC-SA-3.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/3.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.1.json", + "referenceNumber": 161, + "name": "Open LDAP Public License v2.1", + "licenseId": "OLDAP-2.1", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003db0d176738e96a0d3b9f85cb51e140a86f21be715" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-2-Clause-FreeBSD.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause-FreeBSD.json", + "referenceNumber": 162, + "name": "BSD 2-Clause FreeBSD License", + "licenseId": "BSD-2-Clause-FreeBSD", + "seeAlso": [ + "http://www.freebsd.org/copyright/freebsd-license.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CPOL-1.02.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CPOL-1.02.json", + "referenceNumber": 163, + "name": "Code Project Open License 1.02", + "licenseId": "CPOL-1.02", + "seeAlso": [ + "http://www.codeproject.com/info/cpol10.aspx" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MPL-1.0.json", + "referenceNumber": 164, + "name": "Mozilla Public License 1.0", + "licenseId": "MPL-1.0", + "seeAlso": [ + "http://www.mozilla.org/MPL/MPL-1.0.html", + "https://opensource.org/licenses/MPL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/blessing.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/blessing.json", + "referenceNumber": 165, + "name": "SQLite Blessing", + "licenseId": "blessing", + "seeAlso": [ + "https://www.sqlite.org/src/artifact/e33a4df7e32d742a?ln\u003d4-9", + "https://sqlite.org/src/artifact/df5091916dbb40e6" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Parity-6.0.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Parity-6.0.0.json", + "referenceNumber": 166, + "name": "The Parity Public License 6.0.0", + "licenseId": "Parity-6.0.0", + "seeAlso": [ + "https://paritylicense.com/versions/6.0.0.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AFL-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AFL-3.0.json", + "referenceNumber": 167, + "name": "Academic Free License v3.0", + "licenseId": "AFL-3.0", + "seeAlso": [ + "http://www.rosenlaw.com/AFL3.0.htm", + "https://opensource.org/licenses/afl-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SGI-B-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SGI-B-1.0.json", + "referenceNumber": 168, + "name": "SGI Free Software License B v1.0", + "licenseId": 
"SGI-B-1.0", + "seeAlso": [ + "http://oss.sgi.com/projects/FreeB/SGIFreeSWLicB.1.0.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-2-Clause-Patent.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause-Patent.json", + "referenceNumber": 169, + "name": "BSD-2-Clause Plus Patent License", + "licenseId": "BSD-2-Clause-Patent", + "seeAlso": [ + "https://opensource.org/licenses/BSDplusPatent" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Artistic-1.0-cl8.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Artistic-1.0-cl8.json", + "referenceNumber": 170, + "name": "Artistic License 1.0 w/clause 8", + "licenseId": "Artistic-1.0-cl8", + "seeAlso": [ + "https://opensource.org/licenses/Artistic-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-ND-4.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-4.0.json", + "referenceNumber": 171, + "name": "Creative Commons Attribution Non Commercial No Derivatives 4.0 International", + "licenseId": "CC-BY-NC-ND-4.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-nd/4.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Apache-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Apache-1.1.json", + "referenceNumber": 172, + "name": "Apache License 1.1", + "licenseId": "Apache-1.1", + "seeAlso": [ + "http://apache.org/licenses/LICENSE-1.1", + "https://opensource.org/licenses/Apache-1.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/ErlPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ErlPL-1.1.json", + "referenceNumber": 173, + "name": "Erlang Public License v1.1", + "licenseId": "ErlPL-1.1", + "seeAlso": [ + "http://www.erlang.org/EPLICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OFL-1.0-RFN.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFL-1.0-RFN.json", + "referenceNumber": 174, + "name": "SIL Open Font License 1.0 with Reserved Font Name", + "licenseId": "OFL-1.0-RFN", + "seeAlso": [ + "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL10_web" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-3.0.json", + "referenceNumber": 175, + "name": "Creative Commons Attribution Non Commercial 3.0 Unported", + "licenseId": "CC-BY-NC-3.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc/3.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-2.0.json", + "referenceNumber": 176, + "name": "Creative Commons Attribution Non Commercial 2.0 Generic", + "licenseId": "CC-BY-NC-2.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc/2.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MakeIndex.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MakeIndex.json", + "referenceNumber": 177, + "name": "MakeIndex License", + "licenseId": "MakeIndex", + "seeAlso": [ + 
"https://fedoraproject.org/wiki/Licensing/MakeIndex" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Barr.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Barr.json", + "referenceNumber": 178, + "name": "Barr License", + "licenseId": "Barr", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Barr" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-2.1-JP.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-2.1-JP.json", + "referenceNumber": 179, + "name": "Creative Commons Attribution Share Alike 2.1 Japan", + "licenseId": "CC-BY-SA-2.1-JP", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/2.1/jp/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2-no-invariants-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-no-invariants-only.json", + "referenceNumber": 180, + "name": "GNU Free Documentation License v1.2 only - no invariants", + "licenseId": "GFDL-1.2-no-invariants-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Hippocratic-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Hippocratic-2.1.json", + "referenceNumber": 181, + "name": "Hippocratic License 2.1", + "licenseId": "Hippocratic-2.1", + "seeAlso": [ + "https://firstdonoharm.dev/version/2/1/license.html", + "https://github.com/EthicalSource/hippocratic-license/blob/58c0e646d64ff6fbee275bfe2b9492f914e3ab2a/LICENSE.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Adobe-2006.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Adobe-2006.json", + "referenceNumber": 182, + "name": "Adobe Systems Incorporated Source Code License Agreement", + "licenseId": "Adobe-2006", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/AdobeLicense" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OSL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OSL-2.0.json", + "referenceNumber": 183, + "name": "Open Software License 2.0", + "licenseId": "OSL-2.0", + "seeAlso": [ + "http://web.archive.org/web/20041020171434/http://www.rosenlaw.com/osl2.0.html" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-4.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-4.0.json", + "referenceNumber": 184, + "name": "Creative Commons Attribution Non Commercial Share Alike 4.0 International", + "licenseId": "CC-BY-NC-SA-4.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.1-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.1-or-later.json", + "referenceNumber": 185, + "name": "GNU Lesser General Public License v2.1 or later", + "licenseId": "LGPL-2.1-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html", + "https://opensource.org/licenses/LGPL-2.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/PolyForm-Noncommercial-1.0.0.html", + 
"isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PolyForm-Noncommercial-1.0.0.json", + "referenceNumber": 186, + "name": "PolyForm Noncommercial License 1.0.0", + "licenseId": "PolyForm-Noncommercial-1.0.0", + "seeAlso": [ + "https://polyformproject.org/licenses/noncommercial/1.0.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OpenSSL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OpenSSL.json", + "referenceNumber": 187, + "name": "OpenSSL License", + "licenseId": "OpenSSL", + "seeAlso": [ + "http://www.openssl.org/source/license.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-3.0-with-GCC-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-3.0-with-GCC-exception.json", + "referenceNumber": 188, + "name": "GNU General Public License v3.0 w/GCC Runtime Library exception", + "licenseId": "GPL-3.0-with-GCC-exception", + "seeAlso": [ + "https://www.gnu.org/licenses/gcc-exception-3.1.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/OPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OPL-1.0.json", + "referenceNumber": 189, + "name": "Open Public License v1.0", + "licenseId": "OPL-1.0", + "seeAlso": [ + "http://old.koalateam.com/jackaroo/OPL_1_0.TXT", + "https://fedoraproject.org/wiki/Licensing/Open_Public_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-3-Clause-Attribution.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-Attribution.json", + "referenceNumber": 190, + "name": "BSD with attribution", + "licenseId": "BSD-3-Clause-Attribution", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/BSD_with_Attribution" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Rdisc.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Rdisc.json", + "referenceNumber": 191, + "name": "Rdisc License", + "licenseId": "Rdisc", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Rdisc_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MS-RL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MS-RL.json", + "referenceNumber": 192, + "name": "Microsoft Reciprocal License", + "licenseId": "MS-RL", + "seeAlso": [ + "http://www.microsoft.com/opensource/licenses.mspx", + "https://opensource.org/licenses/MS-RL" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/EUDatagrid.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EUDatagrid.json", + "referenceNumber": 193, + "name": "EU DataGrid Software License", + "licenseId": "EUDatagrid", + "seeAlso": [ + "http://eu-datagrid.web.cern.ch/eu-datagrid/license.html", + "https://opensource.org/licenses/EUDatagrid" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LGPLLR.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPLLR.json", + "referenceNumber": 194, + "name": "Lesser General Public License For Linguistic Resources", + "licenseId": "LGPLLR", + "seeAlso": [ + "http://www-igm.univ-mlv.fr/~unitex/lgpllr.html" + ], + "isOsiApproved": false + }, + { + "reference": 
"https://spdx.org/licenses/AFL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AFL-2.0.json", + "referenceNumber": 195, + "name": "Academic Free License v2.0", + "licenseId": "AFL-2.0", + "seeAlso": [ + "http://wayback.archive.org/web/20060924134533/http://www.opensource.org/licenses/afl-2.0.txt" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/MIT-Modern-Variant.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-Modern-Variant.json", + "referenceNumber": 196, + "name": "MIT License Modern Variant", + "licenseId": "MIT-Modern-Variant", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing:MIT#Modern_Variants", + "https://ptolemy.berkeley.edu/copyright.htm", + "https://pirlwww.lpl.arizona.edu/resources/guide/software/PerlTk/Tixlic.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3-invariants-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-invariants-only.json", + "referenceNumber": 197, + "name": "GNU Free Documentation License v1.3 only - invariants", + "licenseId": "GFDL-1.3-invariants-only", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LiLiQ-R-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LiLiQ-R-1.1.json", + "referenceNumber": 198, + "name": "Licence Libre du Québec – Réciprocité version 1.1", + "licenseId": "LiLiQ-R-1.1", + "seeAlso": [ + "https://www.forge.gouv.qc.ca/participez/licence-logicielle/licence-libre-du-quebec-liliq-en-francais/licence-libre-du-quebec-reciprocite-liliq-r-v1-1/", + "http://opensource.org/licenses/LiLiQ-R-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CDLA-Permissive-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CDLA-Permissive-1.0.json", + "referenceNumber": 199, + "name": "Community Data License Agreement Permissive 1.0", + "licenseId": "CDLA-Permissive-1.0", + "seeAlso": [ + "https://cdla.io/permissive-1-0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/DRL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/DRL-1.0.json", + "referenceNumber": 200, + "name": "Detection Rule License 1.0", + "licenseId": "DRL-1.0", + "seeAlso": [ + "https://github.com/Neo23x0/sigma/blob/master/LICENSE.Detection.Rules.md" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-Source-Code.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-Source-Code.json", + "referenceNumber": 201, + "name": "BSD Source Code Attribution", + "licenseId": "BSD-Source-Code", + "seeAlso": [ + "https://github.com/robbiehanson/CocoaHTTPServer/blob/master/LICENSE.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-ND-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-1.0.json", + "referenceNumber": 202, + "name": "Creative Commons Attribution Non Commercial No Derivatives 1.0 Generic", + "licenseId": "CC-BY-NC-ND-1.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd-nc/1.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GLWTPL.html", + "isDeprecatedLicenseId": false, + 
"detailsUrl": "https://spdx.org/licenses/GLWTPL.json", + "referenceNumber": 203, + "name": "Good Luck With That Public License", + "licenseId": "GLWTPL", + "seeAlso": [ + "https://github.com/me-shaon/GLWTPL/commit/da5f6bc734095efbacb442c0b31e33a65b9d6e85" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/VSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/VSL-1.0.json", + "referenceNumber": 204, + "name": "Vovida Software License v1.0", + "licenseId": "VSL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/VSL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CPAL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CPAL-1.0.json", + "referenceNumber": 205, + "name": "Common Public Attribution License 1.0", + "licenseId": "CPAL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/CPAL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/HaskellReport.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/HaskellReport.json", + "referenceNumber": 206, + "name": "Haskell Language Report License", + "licenseId": "HaskellReport", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Haskell_Language_Report_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/APSL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/APSL-1.1.json", + "referenceNumber": 207, + "name": "Apple Public Source License 1.1", + "licenseId": "APSL-1.1", + "seeAlso": [ + "http://www.opensource.apple.com/source/IOSerialFamily/IOSerialFamily-7/APPLE_LICENSE" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-or-later.json", + "referenceNumber": 208, + "name": "GNU General Public License v2.0 or later", + "licenseId": "GPL-2.0-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html", + "https://opensource.org/licenses/GPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/BSD-3-Clause-Modification.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-Modification.json", + "referenceNumber": 209, + "name": "BSD 3-Clause Modification", + "licenseId": "BSD-3-Clause-Modification", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing:BSD#Modification_Variant" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.3.json", + "referenceNumber": 210, + "name": "Open LDAP Public License v2.3", + "licenseId": "OLDAP-2.3", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003dd32cf54a32d581ab475d23c810b0a7fbaf8d63c3" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OFL-1.1-no-RFN.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFL-1.1-no-RFN.json", + "referenceNumber": 211, + "name": "SIL Open Font License 1.1 with no Reserved Font Name", + "licenseId": "OFL-1.1-no-RFN", + "seeAlso": [ + "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL_web", + 
"https://opensource.org/licenses/OFL-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/BitTorrent-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BitTorrent-1.0.json", + "referenceNumber": 212, + "name": "BitTorrent Open Source License v1.0", + "licenseId": "BitTorrent-1.0", + "seeAlso": [ + "http://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/licenses/BitTorrent?r1\u003d1.1\u0026r2\u003d1.1.1.1\u0026diff_format\u003ds" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NRL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NRL.json", + "referenceNumber": 213, + "name": "NRL License", + "licenseId": "NRL", + "seeAlso": [ + "http://web.mit.edu/network/isakmp/nrllicense.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2.json", + "referenceNumber": 214, + "name": "GNU Free Documentation License v1.2", + "licenseId": "GFDL-1.2", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/MirOS.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MirOS.json", + "referenceNumber": 215, + "name": "The MirOS Licence", + "licenseId": "MirOS", + "seeAlso": [ + "https://opensource.org/licenses/MirOS" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Sleepycat.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Sleepycat.json", + "referenceNumber": 216, + "name": "Sleepycat License", + "licenseId": "Sleepycat", + "seeAlso": [ + "https://opensource.org/licenses/Sleepycat" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LPPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPPL-1.1.json", + "referenceNumber": 217, + "name": "LaTeX Project Public License v1.1", + "licenseId": "LPPL-1.1", + "seeAlso": [ + "http://www.latex-project.org/lppl/lppl-1-1.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/WTFPL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/WTFPL.json", + "referenceNumber": 218, + "name": "Do What The F*ck You Want To Public License", + "licenseId": "WTFPL", + "seeAlso": [ + "http://www.wtfpl.net/about/", + "http://sam.zoy.org/wtfpl/COPYING" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/PolyForm-Small-Business-1.0.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PolyForm-Small-Business-1.0.0.json", + "referenceNumber": 219, + "name": "PolyForm Small Business License 1.0.0", + "licenseId": "PolyForm-Small-Business-1.0.0", + "seeAlso": [ + "https://polyformproject.org/licenses/small-business/1.0.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Caldera.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Caldera.json", + "referenceNumber": 220, + "name": "Caldera License", + "licenseId": "Caldera", + "seeAlso": [ + "http://www.lemis.com/grog/UNIX/ancient-source-all.pdf" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/HTMLTIDY.html", + "isDeprecatedLicenseId": false, + 
"detailsUrl": "https://spdx.org/licenses/HTMLTIDY.json", + "referenceNumber": 221, + "name": "HTML Tidy License", + "licenseId": "HTMLTIDY", + "seeAlso": [ + "https://github.com/htacg/tidy-html5/blob/next/README/LICENSE.md" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SISSL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SISSL.json", + "referenceNumber": 222, + "name": "Sun Industry Standards Source License v1.1", + "licenseId": "SISSL", + "seeAlso": [ + "http://www.openoffice.org/licenses/sissl_license.html", + "https://opensource.org/licenses/SISSL" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/MITNFA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MITNFA.json", + "referenceNumber": 223, + "name": "MIT +no-false-attribs license", + "licenseId": "MITNFA", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MITNFA" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/0BSD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/0BSD.json", + "referenceNumber": 224, + "name": "BSD Zero Clause License", + "licenseId": "0BSD", + "seeAlso": [ + "http://landley.net/toybox/license.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CC0-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC0-1.0.json", + "referenceNumber": 225, + "name": "Creative Commons Zero v1.0 Universal", + "licenseId": "CC0-1.0", + "seeAlso": [ + "https://creativecommons.org/publicdomain/zero/1.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-3.0+.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/LGPL-3.0+.json", + "referenceNumber": 226, + "name": "GNU Lesser General Public License v3.0 or later", + "licenseId": "LGPL-3.0+", + "seeAlso": [ + "https://www.gnu.org/licenses/lgpl-3.0-standalone.html", + "https://opensource.org/licenses/LGPL-3.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CDLA-Sharing-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CDLA-Sharing-1.0.json", + "referenceNumber": 227, + "name": "Community Data License Agreement Sharing 1.0", + "licenseId": "CDLA-Sharing-1.0", + "seeAlso": [ + "https://cdla.io/sharing-1-0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-with-bison-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-bison-exception.json", + "referenceNumber": 228, + "name": "GNU General Public License v2.0 w/Bison exception", + "licenseId": "GPL-2.0-with-bison-exception", + "seeAlso": [ + "http://git.savannah.gnu.org/cgit/bison.git/tree/data/yacc.c?id\u003d193d7c7054ba7197b0789e14965b739162319b5e#n141" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/EFL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EFL-2.0.json", + "referenceNumber": 229, + "name": "Eiffel Forum License v2.0", + "licenseId": "EFL-2.0", + "seeAlso": [ + "http://www.eiffel-nice.org/license/eiffel-forum-license-2.html", + "https://opensource.org/licenses/EFL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/AFL-1.1.html", + 
"isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AFL-1.1.json", + "referenceNumber": 230, + "name": "Academic Free License v1.1", + "licenseId": "AFL-1.1", + "seeAlso": [ + "http://opensource.linux-mirror.org/licenses/afl-1.1.txt", + "http://wayback.archive.org/web/20021004124254/http://www.opensource.org/licenses/academic.php" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CC-BY-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-2.0.json", + "referenceNumber": 231, + "name": "Creative Commons Attribution 2.0 Generic", + "licenseId": "CC-BY-2.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by/2.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/RPL-1.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/RPL-1.5.json", + "referenceNumber": 232, + "name": "Reciprocal Public License 1.5", + "licenseId": "RPL-1.5", + "seeAlso": [ + "https://opensource.org/licenses/RPL-1.5" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/MulanPSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MulanPSL-1.0.json", + "referenceNumber": 233, + "name": "Mulan Permissive Software License, Version 1", + "licenseId": "MulanPSL-1.0", + "seeAlso": [ + "https://license.coscl.org.cn/MulanPSL/", + "https://github.com/yuwenlong/longphp/blob/25dfb70cc2a466dc4bb55ba30901cbce08d164b5/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-3.0+.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-3.0+.json", + "referenceNumber": 234, + "name": "GNU General Public License v3.0 or later", + "licenseId": "GPL-3.0+", + "seeAlso": [ + "https://www.gnu.org/licenses/gpl-3.0-standalone.html", + "https://opensource.org/licenses/GPL-3.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/HPND-sell-variant.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/HPND-sell-variant.json", + "referenceNumber": 235, + "name": "Historical Permission Notice and Disclaimer - sell variant", + "licenseId": "HPND-sell-variant", + "seeAlso": [ + "https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/net/sunrpc/auth_gss/gss_generic_token.c?h\u003dv4.19" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SSH-OpenSSH.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SSH-OpenSSH.json", + "referenceNumber": 236, + "name": "SSH OpenSSH license", + "licenseId": "SSH-OpenSSH", + "seeAlso": [ + "https://github.com/openssh/openssh-portable/blob/1b11ea7c58cd5c59838b5fa574cd456d6047b2d4/LICENCE#L10" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-1.1.json", + "referenceNumber": 237, + "name": "Open LDAP Public License v1.1", + "licenseId": "OLDAP-1.1", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d806557a5ad59804ef3a44d5abfbe91d706b0791f" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BitTorrent-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BitTorrent-1.1.json", + "referenceNumber": 238, + "name": 
"BitTorrent Open Source License v1.1", + "licenseId": "BitTorrent-1.1", + "seeAlso": [ + "http://directory.fsf.org/wiki/License:BitTorrentOSL1.1" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Artistic-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Artistic-1.0.json", + "referenceNumber": 239, + "name": "Artistic License 1.0", + "licenseId": "Artistic-1.0", + "seeAlso": [ + "https://opensource.org/licenses/Artistic-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/SSH-short.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SSH-short.json", + "referenceNumber": 240, + "name": "SSH short notice", + "licenseId": "SSH-short", + "seeAlso": [ + "https://github.com/openssh/openssh-portable/blob/1b11ea7c58cd5c59838b5fa574cd456d6047b2d4/pathnames.h", + "http://web.mit.edu/kolya/.f/root/athena.mit.edu/sipb.mit.edu/project/openssh/OldFiles/src/openssh-2.9.9p2/ssh-add.1", + "https://joinup.ec.europa.eu/svn/lesoll/trunk/italc/lib/src/dsa_key.cpp" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-3.0-AT.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-3.0-AT.json", + "referenceNumber": 241, + "name": "Creative Commons Attribution 3.0 Austria", + "licenseId": "CC-BY-3.0-AT", + "seeAlso": [ + "https://creativecommons.org/licenses/by/3.0/at/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MIT-CMU.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-CMU.json", + "referenceNumber": 242, + "name": "CMU License", + "licenseId": "MIT-CMU", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing:MIT?rd\u003dLicensing/MIT#CMU_Style", + "https://github.com/python-pillow/Pillow/blob/fffb426092c8db24a5f4b6df243a8a3c01fb63cd/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3-no-invariants-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-no-invariants-or-later.json", + "referenceNumber": 243, + "name": "GNU Free Documentation License v1.3 or later - no invariants", + "licenseId": "GFDL-1.3-no-invariants-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TOSL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TOSL.json", + "referenceNumber": 244, + "name": "Trusster Open Source License", + "licenseId": "TOSL", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/TOSL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MIT-open-group.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-open-group.json", + "referenceNumber": 245, + "name": "MIT Open Group variant", + "licenseId": "MIT-open-group", + "seeAlso": [ + "https://gitlab.freedesktop.org/xorg/app/iceauth/-/blob/master/COPYING", + "https://gitlab.freedesktop.org/xorg/app/xvinfo/-/blob/master/COPYING", + "https://gitlab.freedesktop.org/xorg/app/xsetroot/-/blob/master/COPYING", + "https://gitlab.freedesktop.org/xorg/app/xauth/-/blob/master/COPYING" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.6.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.6.json", + 
"referenceNumber": 246, + "name": "Open LDAP Public License v2.6", + "licenseId": "OLDAP-2.6", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d1cae062821881f41b73012ba816434897abf4205" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-only.json", + "referenceNumber": 247, + "name": "GNU Free Documentation License v1.1 only", + "licenseId": "GFDL-1.1-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/FreeBSD-DOC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FreeBSD-DOC.json", + "referenceNumber": 248, + "name": "FreeBSD Documentation License", + "licenseId": "FreeBSD-DOC", + "seeAlso": [ + "https://www.freebsd.org/copyright/freebsd-doc-license/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0.json", + "referenceNumber": 249, + "name": "GNU General Public License v2.0 only", + "licenseId": "GPL-2.0", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html", + "https://opensource.org/licenses/GPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Fair.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Fair.json", + "referenceNumber": 250, + "name": "Fair License", + "licenseId": "Fair", + "seeAlso": [ + "http://fairlicense.org/", + "https://opensource.org/licenses/Fair" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CECILL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CECILL-1.1.json", + "referenceNumber": 251, + "name": "CeCILL Free Software License Agreement v1.1", + "licenseId": "CECILL-1.1", + "seeAlso": [ + "http://www.cecill.info/licences/Licence_CeCILL_V1.1-US.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/QPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/QPL-1.0.json", + "referenceNumber": 252, + "name": "Q Public License 1.0", + "licenseId": "QPL-1.0", + "seeAlso": [ + "http://doc.qt.nokia.com/3.3/license.html", + "https://opensource.org/licenses/QPL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/DOC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/DOC.json", + "referenceNumber": 253, + "name": "DOC License", + "licenseId": "DOC", + "seeAlso": [ + "http://www.cs.wustl.edu/~schmidt/ACE-copying.html", + "https://www.dre.vanderbilt.edu/~schmidt/ACE-copying.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LAL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LAL-1.2.json", + "referenceNumber": 254, + "name": "Licence Art Libre 1.2", + "licenseId": "LAL-1.2", + "seeAlso": [ + "http://artlibre.org/licence/lal/licence-art-libre-12/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LPL-1.02.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPL-1.02.json", + "referenceNumber": 255, + "name": 
"Lucent Public License v1.02", + "licenseId": "LPL-1.02", + "seeAlso": [ + "http://plan9.bell-labs.com/plan9/license.html", + "https://opensource.org/licenses/LPL-1.02" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CERN-OHL-P-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CERN-OHL-P-2.0.json", + "referenceNumber": 256, + "name": "CERN Open Hardware Licence Version 2 - Permissive", + "licenseId": "CERN-OHL-P-2.0", + "seeAlso": [ + "https://www.ohwr.org/project/cernohl/wikis/Documents/CERN-OHL-version-2" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/etalab-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/etalab-2.0.json", + "referenceNumber": 257, + "name": "Etalab Open License 2.0", + "licenseId": "etalab-2.0", + "seeAlso": [ + "https://github.com/DISIC/politique-de-contribution-open-source/blob/master/LICENSE.pdf", + "https://raw.githubusercontent.com/DISIC/politique-de-contribution-open-source/master/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/FTL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FTL.json", + "referenceNumber": 258, + "name": "Freetype Project License", + "licenseId": "FTL", + "seeAlso": [ + "http://freetype.fis.uniroma2.it/FTL.TXT", + "http://git.savannah.gnu.org/cgit/freetype/freetype2.git/tree/docs/FTL.TXT", + "http://gitlab.freedesktop.org/freetype/freetype/-/raw/master/docs/FTL.TXT" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Qhull.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Qhull.json", + "referenceNumber": 259, + "name": "Qhull License", + "licenseId": "Qhull", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Qhull" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-3-Clause-Clear.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-Clear.json", + "referenceNumber": 260, + "name": "BSD 3-Clause Clear License", + "licenseId": "BSD-3-Clause-Clear", + "seeAlso": [ + "http://labs.metacarta.com/license-explanation.html#license" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/BSD-3-Clause-No-Military-License.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-No-Military-License.json", + "referenceNumber": 261, + "name": "BSD 3-Clause No Military License", + "licenseId": "BSD-3-Clause-No-Military-License", + "seeAlso": [ + "https://gitlab.syncad.com/hive/dhive/-/blob/master/LICENSE", + "https://github.com/greymass/swift-eosio/blob/master/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/FSFAP.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FSFAP.json", + "referenceNumber": 262, + "name": "FSF All Permissive License", + "licenseId": "FSFAP", + "seeAlso": [ + "https://www.gnu.org/prep/maintain/html_node/License-Notices-for-Other-Files.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/APL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/APL-1.0.json", + "referenceNumber": 263, + "name": "Adaptive Public License 1.0", + "licenseId": "APL-1.0", + "seeAlso": [ + 
"https://opensource.org/licenses/APL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.8.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.8.json", + "referenceNumber": 264, + "name": "Open LDAP Public License v2.8", + "licenseId": "OLDAP-2.8", + "seeAlso": [ + "http://www.openldap.org/software/release/license.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/TORQUE-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TORQUE-1.1.json", + "referenceNumber": 265, + "name": "TORQUE v2.5+ Software License v1.1", + "licenseId": "TORQUE-1.1", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/TORQUEv1.1" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Sendmail-8.23.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Sendmail-8.23.json", + "referenceNumber": 266, + "name": "Sendmail License 8.23", + "licenseId": "Sendmail-8.23", + "seeAlso": [ + "https://www.proofpoint.com/sites/default/files/sendmail-license.pdf", + "https://web.archive.org/web/20181003101040/https://www.proofpoint.com/sites/default/files/sendmail-license.pdf" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/diffmark.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/diffmark.json", + "referenceNumber": 267, + "name": "diffmark license", + "licenseId": "diffmark", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/diffmark" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Frameworx-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Frameworx-1.0.json", + "referenceNumber": 268, + "name": "Frameworx Open License 1.0", + "licenseId": "Frameworx-1.0", + "seeAlso": [ + "https://opensource.org/licenses/Frameworx-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/zlib-acknowledgement.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/zlib-acknowledgement.json", + "referenceNumber": 269, + "name": "zlib/libpng License with Acknowledgement", + "licenseId": "zlib-acknowledgement", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/ZlibWithAcknowledgement" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/EFL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EFL-1.0.json", + "referenceNumber": 270, + "name": "Eiffel Forum License v1.0", + "licenseId": "EFL-1.0", + "seeAlso": [ + "http://www.eiffel-nice.org/license/forum.txt", + "https://opensource.org/licenses/EFL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/IJG.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/IJG.json", + "referenceNumber": 271, + "name": "Independent JPEG Group License", + "licenseId": "IJG", + "seeAlso": [ + "http://dev.w3.org/cvsweb/Amaya/libjpeg/Attic/README?rev\u003d1.2" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3-no-invariants-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-no-invariants-only.json", + "referenceNumber": 272, + "name": "GNU Free Documentation License v1.3 only - no invariants", + "licenseId": "GFDL-1.3-no-invariants-only", + "seeAlso": [ + 
"https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Noweb.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Noweb.json", + "referenceNumber": 273, + "name": "Noweb License", + "licenseId": "Noweb", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Noweb" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3.json", + "referenceNumber": 274, + "name": "GNU Free Documentation License v1.3", + "licenseId": "GFDL-1.3", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.1.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.1.json", + "referenceNumber": 275, + "name": "GNU Lesser General Public License v2.1 only", + "licenseId": "LGPL-2.1", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html", + "https://opensource.org/licenses/LGPL-2.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/gSOAP-1.3b.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/gSOAP-1.3b.json", + "referenceNumber": 276, + "name": "gSOAP Public License v1.3b", + "licenseId": "gSOAP-1.3b", + "seeAlso": [ + "http://www.cs.fsu.edu/~engelen/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OFL-1.1-RFN.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFL-1.1-RFN.json", + "referenceNumber": 277, + "name": "SIL Open Font License 1.1 with Reserved Font Name", + "licenseId": "OFL-1.1-RFN", + "seeAlso": [ + "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL_web", + "https://opensource.org/licenses/OFL-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/GPL-3.0-with-autoconf-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-3.0-with-autoconf-exception.json", + "referenceNumber": 278, + "name": "GNU General Public License v3.0 w/Autoconf exception", + "licenseId": "GPL-3.0-with-autoconf-exception", + "seeAlso": [ + "https://www.gnu.org/licenses/autoconf-exception-3.0.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CERN-OHL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CERN-OHL-1.1.json", + "referenceNumber": 279, + "name": "CERN Open Hardware Licence v1.1", + "licenseId": "CERN-OHL-1.1", + "seeAlso": [ + "https://www.ohwr.org/project/licenses/wikis/cern-ohl-v1.1" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AFL-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AFL-2.1.json", + "referenceNumber": 280, + "name": "Academic Free License v2.1", + "licenseId": "AFL-2.1", + "seeAlso": [ + "http://opensource.linux-mirror.org/licenses/afl-2.1.txt" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/MIT-enna.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-enna.json", + "referenceNumber": 281, + "name": "enna License", + "licenseId": "MIT-enna", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MIT#enna" + ], + 
"isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Adobe-Glyph.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Adobe-Glyph.json", + "referenceNumber": 282, + "name": "Adobe Glyph List License", + "licenseId": "Adobe-Glyph", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MIT#AdobeGlyph" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/EPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EPL-1.0.json", + "referenceNumber": 283, + "name": "Eclipse Public License 1.0", + "licenseId": "EPL-1.0", + "seeAlso": [ + "http://www.eclipse.org/legal/epl-v10.html", + "https://opensource.org/licenses/EPL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Xerox.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Xerox.json", + "referenceNumber": 284, + "name": "Xerox License", + "licenseId": "Xerox", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Xerox" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.0.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.0.1.json", + "referenceNumber": 285, + "name": "Open LDAP Public License v2.0.1", + "licenseId": "OLDAP-2.0.1", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003db6d68acd14e51ca3aab4428bf26522aa74873f0e" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MTLL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MTLL.json", + "referenceNumber": 286, + "name": "Matrix Template Library License", + "licenseId": "MTLL", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Matrix_Template_Library_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ImageMagick.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ImageMagick.json", + "referenceNumber": 287, + "name": "ImageMagick License", + "licenseId": "ImageMagick", + "seeAlso": [ + "http://www.imagemagick.org/script/license.php" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/psutils.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/psutils.json", + "referenceNumber": 288, + "name": "psutils License", + "licenseId": "psutils", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/psutils" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ClArtistic.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ClArtistic.json", + "referenceNumber": 289, + "name": "Clarified Artistic License", + "licenseId": "ClArtistic", + "seeAlso": [ + "http://gianluca.dellavedova.org/2011/01/03/clarified-artistic-license/", + "http://www.ncftp.com/ncftp/doc/LICENSE.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3-invariants-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-invariants-or-later.json", + "referenceNumber": 290, + "name": "GNU Free Documentation License v1.3 or later - invariants", + "licenseId": "GFDL-1.3-invariants-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false + }, + { + "reference": 
"https://spdx.org/licenses/APSL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/APSL-1.2.json", + "referenceNumber": 291, + "name": "Apple Public Source License 1.2", + "licenseId": "APSL-1.2", + "seeAlso": [ + "http://www.samurajdata.se/opensource/mirror/licenses/apsl.php" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Apache-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Apache-2.0.json", + "referenceNumber": 292, + "name": "Apache License 2.0", + "licenseId": "Apache-2.0", + "seeAlso": [ + "http://www.apache.org/licenses/LICENSE-2.0", + "https://opensource.org/licenses/Apache-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/NIST-PD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NIST-PD.json", + "referenceNumber": 293, + "name": "NIST Public Domain Notice", + "licenseId": "NIST-PD", + "seeAlso": [ + "https://github.com/tcheneau/simpleRPL/blob/e645e69e38dd4e3ccfeceb2db8cba05b7c2e0cd3/LICENSE.txt", + "https://github.com/tcheneau/Routing/blob/f09f46fcfe636107f22f2c98348188a65a135d98/README.md" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Libpng.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Libpng.json", + "referenceNumber": 294, + "name": "libpng License", + "licenseId": "Libpng", + "seeAlso": [ + "http://www.libpng.org/pub/png/src/libpng-LICENSE.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/TAPR-OHL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/TAPR-OHL-1.0.json", + "referenceNumber": 295, + "name": "TAPR Open Hardware License v1.0", + "licenseId": "TAPR-OHL-1.0", + "seeAlso": [ + "https://www.tapr.org/OHL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ICU.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ICU.json", + "referenceNumber": 296, + "name": "ICU License", + "licenseId": "ICU", + "seeAlso": [ + "http://source.icu-project.org/repos/icu/icu/trunk/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-2.5.json", + "referenceNumber": 297, + "name": "Creative Commons Attribution Share Alike 2.5 Generic", + "licenseId": "CC-BY-SA-2.5", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/2.5/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-PDDC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-PDDC.json", + "referenceNumber": 298, + "name": "Creative Commons Public Domain Dedication and Certification", + "licenseId": "CC-PDDC", + "seeAlso": [ + "https://creativecommons.org/licenses/publicdomain/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AGPL-3.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AGPL-3.0-only.json", + "referenceNumber": 299, + "name": "GNU Affero General Public License v3.0 only", + "licenseId": "AGPL-3.0-only", + "seeAlso": [ + "https://www.gnu.org/licenses/agpl.txt", + "https://opensource.org/licenses/AGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OSL-1.1.html", + 
"isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OSL-1.1.json", + "referenceNumber": 300, + "name": "Open Software License 1.1", + "licenseId": "OSL-1.1", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/OSL1.1" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SugarCRM-1.1.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SugarCRM-1.1.3.json", + "referenceNumber": 301, + "name": "SugarCRM Public License v1.1.3", + "licenseId": "SugarCRM-1.1.3", + "seeAlso": [ + "http://www.sugarcrm.com/crm/SPL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/FreeImage.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FreeImage.json", + "referenceNumber": 302, + "name": "FreeImage Public License v1.0", + "licenseId": "FreeImage", + "seeAlso": [ + "http://freeimage.sourceforge.net/freeimage-license.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/W3C-20150513.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/W3C-20150513.json", + "referenceNumber": 303, + "name": "W3C Software Notice and Document License (2015-05-13)", + "licenseId": "W3C-20150513", + "seeAlso": [ + "https://www.w3.org/Consortium/Legal/2015/copyright-software-and-document" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/D-FSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/D-FSL-1.0.json", + "referenceNumber": 304, + "name": "Deutsche Freie Software Lizenz", + "licenseId": "D-FSL-1.0", + "seeAlso": [ + "http://www.dipp.nrw.de/d-fsl/lizenzen/", + "http://www.dipp.nrw.de/d-fsl/index_html/lizenzen/de/D-FSL-1_0_de.txt", + "http://www.dipp.nrw.de/d-fsl/index_html/lizenzen/en/D-FSL-1_0_en.txt", + "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl", + "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl/deutsche-freie-software-lizenz", + "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl/german-free-software-license", + "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl/D-FSL-1_0_de.txt/at_download/file", + "https://www.hbz-nrw.de/produkte/open-access/lizenzen/dfsl/D-FSL-1_0_en.txt/at_download/file" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/RSA-MD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/RSA-MD.json", + "referenceNumber": 305, + "name": "RSA Message-Digest License", + "licenseId": "RSA-MD", + "seeAlso": [ + "http://www.faqs.org/rfcs/rfc1321.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-ND-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-2.0.json", + "referenceNumber": 306, + "name": "Creative Commons Attribution No Derivatives 2.0 Generic", + "licenseId": "CC-BY-ND-2.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd/2.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-with-GCC-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-GCC-exception.json", + "referenceNumber": 307, + "name": "GNU General Public License v2.0 w/GCC Runtime Library exception", + "licenseId": "GPL-2.0-with-GCC-exception", + "seeAlso": [ + 
"https://gcc.gnu.org/git/?p\u003dgcc.git;a\u003dblob;f\u003dgcc/libgcc1.c;h\u003d762f5143fc6eed57b6797c82710f3538aa52b40b;hb\u003dcb143a3ce4fb417c68f5fa2691a1b1b1053dfba9#l10" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AGPL-3.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AGPL-3.0-or-later.json", + "referenceNumber": 308, + "name": "GNU Affero General Public License v3.0 or later", + "licenseId": "AGPL-3.0-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/agpl.txt", + "https://opensource.org/licenses/AGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/AGPL-1.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AGPL-1.0-or-later.json", + "referenceNumber": 309, + "name": "Affero General Public License v1.0 or later", + "licenseId": "AGPL-1.0-or-later", + "seeAlso": [ + "http://www.affero.org/oagpl.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/iMatix.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/iMatix.json", + "referenceNumber": 310, + "name": "iMatix Standard Function Library Agreement", + "licenseId": "iMatix", + "seeAlso": [ + "http://legacy.imatix.com/html/sfl/sfl4.htm#license" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Plexus.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Plexus.json", + "referenceNumber": 311, + "name": "Plexus Classworlds License", + "licenseId": "Plexus", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Plexus_Classworlds_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OFL-1.0-no-RFN.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OFL-1.0-no-RFN.json", + "referenceNumber": 312, + "name": "SIL Open Font License 1.0 with no Reserved Font Name", + "licenseId": "OFL-1.0-no-RFN", + "seeAlso": [ + "http://scripts.sil.org/cms/scripts/page.php?item_id\u003dOFL10_web" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NAIST-2003.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NAIST-2003.json", + "referenceNumber": 313, + "name": "Nara Institute of Science and Technology License (2003)", + "licenseId": "NAIST-2003", + "seeAlso": [ + "https://enterprise.dejacode.com/licenses/public/naist-2003/#license-text", + "https://github.com/nodejs/node/blob/4a19cc8947b1bba2b2d27816ec3d0edf9b28e503/LICENSE#L343" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MIT-feh.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-feh.json", + "referenceNumber": 314, + "name": "feh License", + "licenseId": "MIT-feh", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MIT#feh" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ECL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ECL-2.0.json", + "referenceNumber": 315, + "name": "Educational Community License v2.0", + "licenseId": "ECL-2.0", + "seeAlso": [ + "https://opensource.org/licenses/ECL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CC-BY-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": 
"https://spdx.org/licenses/CC-BY-2.5.json", + "referenceNumber": 316, + "name": "Creative Commons Attribution 2.5 Generic", + "licenseId": "CC-BY-2.5", + "seeAlso": [ + "https://creativecommons.org/licenses/by/2.5/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/XSkat.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/XSkat.json", + "referenceNumber": 317, + "name": "XSkat License", + "licenseId": "XSkat", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/XSkat_License" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Linux-OpenIB.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Linux-OpenIB.json", + "referenceNumber": 318, + "name": "Linux Kernel Variant of OpenIB.org license", + "licenseId": "Linux-OpenIB", + "seeAlso": [ + "https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/drivers/infiniband/core/sa.h" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Spencer-99.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Spencer-99.json", + "referenceNumber": 319, + "name": "Spencer License 99", + "licenseId": "Spencer-99", + "seeAlso": [ + "http://www.opensource.apple.com/source/tcl/tcl-5/tcl/generic/regfronts.c" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-License-2014.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-License-2014.json", + "referenceNumber": 320, + "name": "BSD 3-Clause No Nuclear License 2014", + "licenseId": "BSD-3-Clause-No-Nuclear-License-2014", + "seeAlso": [ + "https://java.net/projects/javaeetutorial/pages/BerkeleyLicense" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-ND-3.0-IGO.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-3.0-IGO.json", + "referenceNumber": 321, + "name": "Creative Commons Attribution Non Commercial No Derivatives 3.0 IGO", + "licenseId": "CC-BY-NC-ND-3.0-IGO", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-nd/3.0/igo/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-1.0.json", + "referenceNumber": 322, + "name": "Creative Commons Attribution Non Commercial Share Alike 1.0 Generic", + "licenseId": "CC-BY-NC-SA-1.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/1.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-with-font-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-font-exception.json", + "referenceNumber": 323, + "name": "GNU General Public License v2.0 w/Font exception", + "licenseId": "GPL-2.0-with-font-exception", + "seeAlso": [ + "https://www.gnu.org/licenses/gpl-faq.html#FontException" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Crossword.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Crossword.json", + "referenceNumber": 324, + "name": "Crossword License", + "licenseId": "Crossword", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Crossword" + ], + "isOsiApproved": false + }, + { + "reference": 
"https://spdx.org/licenses/OLDAP-2.2.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.2.2.json", + "referenceNumber": 325, + "name": "Open LDAP Public License 2.2.2", + "licenseId": "OLDAP-2.2.2", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003ddf2cc1e21eb7c160695f5b7cffd6296c151ba188" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-2-Clause-NetBSD.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause-NetBSD.json", + "referenceNumber": 326, + "name": "BSD 2-Clause NetBSD License", + "licenseId": "BSD-2-Clause-NetBSD", + "seeAlso": [ + "http://www.netbsd.org/about/redistribution.html#default" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0+.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0+.json", + "referenceNumber": 327, + "name": "GNU General Public License v2.0 or later", + "licenseId": "GPL-2.0+", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html", + "https://opensource.org/licenses/GPL-2.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CC-BY-4.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-4.0.json", + "referenceNumber": 328, + "name": "Creative Commons Attribution 4.0 International", + "licenseId": "CC-BY-4.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by/4.0/legalcode" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.0.json", + "referenceNumber": 329, + "name": "Open LDAP Public License v2.0 (or possibly 2.0A and 2.0B)", + "licenseId": "OLDAP-2.0", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003dcbf50f4e1185a21abd4c0a54d3f4341fe28f36ea" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NOSL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NOSL.json", + "referenceNumber": 330, + "name": "Netizen Open Source License", + "licenseId": "NOSL", + "seeAlso": [ + "http://bits.netizen.com.au/licenses/NOSL/nosl.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CDDL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CDDL-1.1.json", + "referenceNumber": 331, + "name": "Common Development and Distribution License 1.1", + "licenseId": "CDDL-1.1", + "seeAlso": [ + "http://glassfish.java.net/public/CDDL+GPL_1_1.html", + "https://javaee.github.io/glassfish/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/APSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/APSL-1.0.json", + "referenceNumber": 332, + "name": "Apple Public Source License 1.0", + "licenseId": "APSL-1.0", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Apple_Public_Source_License_1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/EUPL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EUPL-1.2.json", + "referenceNumber": 333, + "name": "European Union Public License 1.2", + "licenseId": "EUPL-1.2", + "seeAlso": 
[ + "https://joinup.ec.europa.eu/page/eupl-text-11-12", + "https://joinup.ec.europa.eu/sites/default/files/custom-page/attachment/eupl_v1.2_en.pdf", + "https://joinup.ec.europa.eu/sites/default/files/custom-page/attachment/2020-03/EUPL-1.2%20EN.txt", + "https://joinup.ec.europa.eu/sites/default/files/inline-files/EUPL%20v1_2%20EN(1).txt", + "http://eur-lex.europa.eu/legal-content/EN/TXT/HTML/?uri\u003dCELEX:32017D0863", + "https://opensource.org/licenses/EUPL-1.2" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Nokia.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Nokia.json", + "referenceNumber": 334, + "name": "Nokia Open Source License", + "licenseId": "Nokia", + "seeAlso": [ + "https://opensource.org/licenses/nokia" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/RHeCos-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/RHeCos-1.1.json", + "referenceNumber": 335, + "name": "Red Hat eCos Public License v1.1", + "licenseId": "RHeCos-1.1", + "seeAlso": [ + "http://ecos.sourceware.org/old-license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-only.json", + "referenceNumber": 336, + "name": "GNU General Public License v2.0 only", + "licenseId": "GPL-2.0-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html", + "https://opensource.org/licenses/GPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.7.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.7.json", + "referenceNumber": 337, + "name": "Open LDAP Public License v2.7", + "licenseId": "OLDAP-2.7", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d47c2415c1df81556eeb39be6cad458ef87c534a2" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Vim.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Vim.json", + "referenceNumber": 338, + "name": "Vim License", + "licenseId": "Vim", + "seeAlso": [ + "http://vimdoc.sourceforge.net/htmldoc/uganda.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SAX-PD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SAX-PD.json", + "referenceNumber": 339, + "name": "Sax Public Domain Notice", + "licenseId": "SAX-PD", + "seeAlso": [ + "http://www.saxproject.org/copying.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-Warranty.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-Warranty.json", + "referenceNumber": 340, + "name": "BSD 3-Clause No Nuclear Warranty", + "licenseId": "BSD-3-Clause-No-Nuclear-Warranty", + "seeAlso": [ + "https://jogamp.org/git/?p\u003dgluegen.git;a\u003dblob_plain;f\u003dLICENSE.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NetCDF.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NetCDF.json", + "referenceNumber": 341, + "name": "NetCDF license", + "licenseId": "NetCDF", + "seeAlso": [ + 
"http://www.unidata.ucar.edu/software/netcdf/copyright.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/dvipdfm.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/dvipdfm.json", + "referenceNumber": 342, + "name": "dvipdfm License", + "licenseId": "dvipdfm", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/dvipdfm" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SHL-0.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SHL-0.5.json", + "referenceNumber": 343, + "name": "Solderpad Hardware License v0.5", + "licenseId": "SHL-0.5", + "seeAlso": [ + "https://solderpad.org/licenses/SHL-0.5/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.0-only.json", + "referenceNumber": 344, + "name": "GNU Library General Public License v2 only", + "licenseId": "LGPL-2.0-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/AAL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AAL.json", + "referenceNumber": 345, + "name": "Attribution Assurance License", + "licenseId": "AAL", + "seeAlso": [ + "https://opensource.org/licenses/attribution" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Unicode-TOU.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Unicode-TOU.json", + "referenceNumber": 346, + "name": "Unicode Terms of Use", + "licenseId": "Unicode-TOU", + "seeAlso": [ + "http://www.unicode.org/copyright.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LPPL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPPL-1.2.json", + "referenceNumber": 347, + "name": "LaTeX Project Public License v1.2", + "licenseId": "LPPL-1.2", + "seeAlso": [ + "http://www.latex-project.org/lppl/lppl-1-2.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/xpp.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/xpp.json", + "referenceNumber": 348, + "name": "XPP License", + "licenseId": "xpp", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/xpp" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SHL-0.51.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SHL-0.51.json", + "referenceNumber": 349, + "name": "Solderpad Hardware License, Version 0.51", + "licenseId": "SHL-0.51", + "seeAlso": [ + "https://solderpad.org/licenses/SHL-0.51/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NCSA.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NCSA.json", + "referenceNumber": 350, + "name": "University of Illinois/NCSA Open Source License", + "licenseId": "NCSA", + "seeAlso": [ + "http://otm.illinois.edu/uiuc_openSource", + "https://opensource.org/licenses/NCSA" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.0-or-later.json", + "referenceNumber": 351, + "name": "GNU Library General Public 
License v2 or later", + "licenseId": "LGPL-2.0-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CC-BY-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-3.0.json", + "referenceNumber": 352, + "name": "Creative Commons Attribution 3.0 Unported", + "licenseId": "CC-BY-3.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by/3.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-1.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-1.0.json", + "referenceNumber": 353, + "name": "GNU General Public License v1.0 only", + "licenseId": "GPL-1.0", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/W3C.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/W3C.json", + "referenceNumber": 354, + "name": "W3C Software Notice and License (2002-12-31)", + "licenseId": "W3C", + "seeAlso": [ + "http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231.html", + "https://opensource.org/licenses/W3C" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Aladdin.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Aladdin.json", + "referenceNumber": 355, + "name": "Aladdin Free Public License", + "licenseId": "Aladdin", + "seeAlso": [ + "http://pages.cs.wisc.edu/~ghost/doc/AFPL/6.01/Public.htm" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-License.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-No-Nuclear-License.json", + "referenceNumber": 356, + "name": "BSD 3-Clause No Nuclear License", + "licenseId": "BSD-3-Clause-No-Nuclear-License", + "seeAlso": [ + "http://download.oracle.com/otn-pub/java/licenses/bsd.txt?AuthParam\u003d1467140197_43d516ce1776bd08a58235a7785be1cc" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-or-later.json", + "referenceNumber": 357, + "name": "GNU Free Documentation License v1.1 or later", + "licenseId": "GFDL-1.1-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/SMPPL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SMPPL.json", + "referenceNumber": 358, + "name": "Secure Messaging Protocol Public License", + "licenseId": "SMPPL", + "seeAlso": [ + "https://github.com/dcblake/SMP/blob/master/Documentation/License.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1.json", + "referenceNumber": 359, + "name": "GNU Free Documentation License v1.1", + "licenseId": "GFDL-1.1", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OLDAP-1.4.html", + "isDeprecatedLicenseId": false, + "detailsUrl": 
"https://spdx.org/licenses/OLDAP-1.4.json", + "referenceNumber": 360, + "name": "Open LDAP Public License v1.4", + "licenseId": "OLDAP-1.4", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003dc9f95c2f3f2ffb5e0ae55fe7388af75547660941" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Condor-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Condor-1.1.json", + "referenceNumber": 361, + "name": "Condor Public License v1.1", + "licenseId": "Condor-1.1", + "seeAlso": [ + "http://research.cs.wisc.edu/condor/license.html#condor", + "http://web.archive.org/web/20111123062036/http://research.cs.wisc.edu/condor/license.html#condor" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-1.0-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GPL-1.0-only.json", + "referenceNumber": 362, + "name": "GNU General Public License v1.0 only", + "licenseId": "GPL-1.0-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/gpl-1.0-standalone.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GPL-3.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-3.0.json", + "referenceNumber": 363, + "name": "GNU General Public License v3.0 only", + "licenseId": "GPL-3.0", + "seeAlso": [ + "https://www.gnu.org/licenses/gpl-3.0-standalone.html", + "https://opensource.org/licenses/GPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/PSF-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PSF-2.0.json", + "referenceNumber": 364, + "name": "Python Software Foundation License 2.0", + "licenseId": "PSF-2.0", + "seeAlso": [ + "https://opensource.org/licenses/Python-2.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Apache-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Apache-1.0.json", + "referenceNumber": 365, + "name": "Apache License 1.0", + "licenseId": "Apache-1.0", + "seeAlso": [ + "http://www.apache.org/licenses/LICENSE-1.0" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/EPL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EPL-2.0.json", + "referenceNumber": 366, + "name": "Eclipse Public License 2.0", + "licenseId": "EPL-2.0", + "seeAlso": [ + "https://www.eclipse.org/legal/epl-2.0", + "https://www.opensource.org/licenses/EPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Python-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Python-2.0.json", + "referenceNumber": 367, + "name": "Python License 2.0", + "licenseId": "Python-2.0", + "seeAlso": [ + "https://opensource.org/licenses/Python-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.4.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.4.json", + "referenceNumber": 368, + "name": "Open LDAP Public License v2.4", + "licenseId": "OLDAP-2.4", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003dcd1284c4a91a8a380d904eee68d1583f989ed386" + ], + 
"isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/PostgreSQL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/PostgreSQL.json", + "referenceNumber": 369, + "name": "PostgreSQL License", + "licenseId": "PostgreSQL", + "seeAlso": [ + "http://www.postgresql.org/about/licence", + "https://opensource.org/licenses/PostgreSQL" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Net-SNMP.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Net-SNMP.json", + "referenceNumber": 370, + "name": "Net-SNMP License", + "licenseId": "Net-SNMP", + "seeAlso": [ + "http://net-snmp.sourceforge.net/about/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Ruby.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Ruby.json", + "referenceNumber": 371, + "name": "Ruby License", + "licenseId": "Ruby", + "seeAlso": [ + "http://www.ruby-lang.org/en/LICENSE.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OSET-PL-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OSET-PL-2.1.json", + "referenceNumber": 372, + "name": "OSET Public License version 2.1", + "licenseId": "OSET-PL-2.1", + "seeAlso": [ + "http://www.osetfoundation.org/public-license", + "https://opensource.org/licenses/OPL-2.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Dotseqn.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Dotseqn.json", + "referenceNumber": 373, + "name": "Dotseqn License", + "licenseId": "Dotseqn", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Dotseqn" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CUA-OPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CUA-OPL-1.0.json", + "referenceNumber": 374, + "name": "CUA Office Public License v1.0", + "licenseId": "CUA-OPL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/CUA-OPL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/Bahyph.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Bahyph.json", + "referenceNumber": 375, + "name": "Bahyph License", + "licenseId": "Bahyph", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Bahyph" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LiLiQ-Rplus-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LiLiQ-Rplus-1.1.json", + "referenceNumber": 376, + "name": "Licence Libre du Québec – Réciprocité forte version 1.1", + "licenseId": "LiLiQ-Rplus-1.1", + "seeAlso": [ + "https://www.forge.gouv.qc.ca/participez/licence-logicielle/licence-libre-du-quebec-liliq-en-francais/licence-libre-du-quebec-reciprocite-forte-liliq-r-v1-1/", + "http://opensource.org/licenses/LiLiQ-Rplus-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.0+.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.0+.json", + "referenceNumber": 377, + "name": "GNU Library General Public License v2 or later", + "licenseId": "LGPL-2.0+", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/wxWindows.html", + 
"isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/wxWindows.json", + "referenceNumber": 378, + "name": "wxWindows Library License", + "licenseId": "wxWindows", + "seeAlso": [ + "https://opensource.org/licenses/WXwindows" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/AGPL-3.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/AGPL-3.0.json", + "referenceNumber": 379, + "name": "GNU Affero General Public License v3.0", + "licenseId": "AGPL-3.0", + "seeAlso": [ + "https://www.gnu.org/licenses/agpl.txt", + "https://opensource.org/licenses/AGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Abstyles.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Abstyles.json", + "referenceNumber": 380, + "name": "Abstyles License", + "licenseId": "Abstyles", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Abstyles" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-1.3.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-1.3.json", + "referenceNumber": 381, + "name": "Open LDAP Public License v1.3", + "licenseId": "OLDAP-1.3", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003de5f8117f0ce088d0bd7a8e18ddf37eaa40eb09b1" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NTP-0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NTP-0.json", + "referenceNumber": 382, + "name": "NTP No Attribution", + "licenseId": "NTP-0", + "seeAlso": [ + "https://github.com/tytso/e2fsprogs/blob/master/lib/et/et_name.c" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.2.json", + "referenceNumber": 383, + "name": "Open LDAP Public License v2.2", + "licenseId": "OLDAP-2.2", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d470b0c18ec67621c85881b2733057fecf4a1acc3" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-3.0.json", + "referenceNumber": 384, + "name": "Creative Commons Attribution Share Alike 3.0 Unported", + "licenseId": "CC-BY-SA-3.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/3.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SWL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SWL.json", + "referenceNumber": 385, + "name": "Scheme Widget Library (SWL) Software License Agreement", + "licenseId": "SWL", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/SWL" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/BSD-3-Clause-Open-MPI.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-3-Clause-Open-MPI.json", + "referenceNumber": 386, + "name": "BSD 3-Clause Open MPI variant", + "licenseId": "BSD-3-Clause-Open-MPI", + "seeAlso": [ + "https://www.open-mpi.org/community/license.php", + "http://www.netlib.org/lapack/LICENSE.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.1+.html", + 
"isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.1+.json", + "referenceNumber": 387, + "name": "GNU Library General Public License v2.1 or later", + "licenseId": "LGPL-2.1+", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.1-standalone.html", + "https://opensource.org/licenses/LGPL-2.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2-invariants-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-invariants-only.json", + "referenceNumber": 388, + "name": "GNU Free Documentation License v1.2 only - invariants", + "licenseId": "GFDL-1.2-invariants-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Zend-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Zend-2.0.json", + "referenceNumber": 389, + "name": "Zend License v2.0", + "licenseId": "Zend-2.0", + "seeAlso": [ + "https://web.archive.org/web/20130517195954/http://www.zend.com/license/2_00.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1-no-invariants-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-no-invariants-or-later.json", + "referenceNumber": 390, + "name": "GNU Free Documentation License v1.1 or later - no invariants", + "licenseId": "GFDL-1.1-no-invariants-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/mpich2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/mpich2.json", + "referenceNumber": 391, + "name": "mpich2 License", + "licenseId": "mpich2", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MIT" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NLOD-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NLOD-1.0.json", + "referenceNumber": 392, + "name": "Norwegian Licence for Open Government Data", + "licenseId": "NLOD-1.0", + "seeAlso": [ + "http://data.norge.no/nlod/en/1.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/gnuplot.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/gnuplot.json", + "referenceNumber": 393, + "name": "gnuplot License", + "licenseId": "gnuplot", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Gnuplot" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CERN-OHL-S-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CERN-OHL-S-2.0.json", + "referenceNumber": 394, + "name": "CERN Open Hardware Licence Version 2 - Strongly Reciprocal", + "licenseId": "CERN-OHL-S-2.0", + "seeAlso": [ + "https://www.ohwr.org/project/cernohl/wikis/Documents/CERN-OHL-version-2" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/OGL-UK-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OGL-UK-2.0.json", + "referenceNumber": 395, + "name": "Open Government Licence v2.0", + "licenseId": "OGL-UK-2.0", + "seeAlso": [ + "http://www.nationalarchives.gov.uk/doc/open-government-licence/version/2/" + ], + "isOsiApproved": false + }, + { + "reference": 
"https://spdx.org/licenses/NPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NPL-1.1.json", + "referenceNumber": 396, + "name": "Netscape Public License v1.1", + "licenseId": "NPL-1.1", + "seeAlso": [ + "http://www.mozilla.org/MPL/NPL/1.1/" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Zed.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Zed.json", + "referenceNumber": 397, + "name": "Zed License", + "licenseId": "Zed", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Zed" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/VOSTROM.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/VOSTROM.json", + "referenceNumber": 398, + "name": "VOSTROM Public License for Open Source", + "licenseId": "VOSTROM", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/VOSTROM" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ZPL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ZPL-2.0.json", + "referenceNumber": 399, + "name": "Zope Public License 2.0", + "licenseId": "ZPL-2.0", + "seeAlso": [ + "http://old.zope.org/Resources/License/ZPL-2.0", + "https://opensource.org/licenses/ZPL-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CERN-OHL-W-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CERN-OHL-W-2.0.json", + "referenceNumber": 400, + "name": "CERN Open Hardware Licence Version 2 - Weakly Reciprocal", + "licenseId": "CERN-OHL-W-2.0", + "seeAlso": [ + "https://www.ohwr.org/project/cernohl/wikis/Documents/CERN-OHL-version-2" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-2.0.json", + "referenceNumber": 401, + "name": "Creative Commons Attribution Non Commercial Share Alike 2.0 Generic", + "licenseId": "CC-BY-NC-SA-2.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/2.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/APSL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/APSL-2.0.json", + "referenceNumber": 402, + "name": "Apple Public Source License 2.0", + "licenseId": "APSL-2.0", + "seeAlso": [ + "http://www.opensource.apple.com/license/apsl/" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/LPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/LPL-1.0.json", + "referenceNumber": 403, + "name": "Lucent Public License Version 1.0", + "licenseId": "LPL-1.0", + "seeAlso": [ + "https://opensource.org/licenses/LPL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/ANTLR-PD-fallback.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ANTLR-PD-fallback.json", + "referenceNumber": 404, + "name": "ANTLR Software Rights Notice with license fallback", + "licenseId": "ANTLR-PD-fallback", + "seeAlso": [ + "http://www.antlr2.org/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/libtiff.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/libtiff.json", + "referenceNumber": 
405, + "name": "libtiff License", + "licenseId": "libtiff", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/libtiff" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/HPND.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/HPND.json", + "referenceNumber": 406, + "name": "Historical Permission Notice and Disclaimer", + "licenseId": "HPND", + "seeAlso": [ + "https://opensource.org/licenses/HPND" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GPL-3.0-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GPL-3.0-or-later.json", + "referenceNumber": 407, + "name": "GNU General Public License v3.0 or later", + "licenseId": "GPL-3.0-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/gpl-3.0-standalone.html", + "https://opensource.org/licenses/GPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Artistic-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Artistic-2.0.json", + "referenceNumber": 408, + "name": "Artistic License 2.0", + "licenseId": "Artistic-2.0", + "seeAlso": [ + "http://www.perlfoundation.org/artistic_license_2_0", + "https://www.perlfoundation.org/artistic-license-20.html", + "https://opensource.org/licenses/artistic-license-2.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Unicode-DFS-2015.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Unicode-DFS-2015.json", + "referenceNumber": 409, + "name": "Unicode License Agreement - Data Files and Software (2015)", + "licenseId": "Unicode-DFS-2015", + "seeAlso": [ + "https://web.archive.org/web/20151224134844/http://unicode.org/copyright.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-4.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-4.0.json", + "referenceNumber": 410, + "name": "Creative Commons Attribution Non Commercial 4.0 International", + "licenseId": "CC-BY-NC-4.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc/4.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/RPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/RPL-1.1.json", + "referenceNumber": 411, + "name": "Reciprocal Public License 1.1", + "licenseId": "RPL-1.1", + "seeAlso": [ + "https://opensource.org/licenses/RPL-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-1.0.json", + "referenceNumber": 412, + "name": "Creative Commons Attribution Share Alike 1.0 Generic", + "licenseId": "CC-BY-SA-1.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/1.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Cube.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Cube.json", + "referenceNumber": 413, + "name": "Cube License", + "licenseId": "Cube", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Cube" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ODC-By-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ODC-By-1.0.json", + 
"referenceNumber": 414, + "name": "Open Data Commons Attribution License v1.0", + "licenseId": "ODC-By-1.0", + "seeAlso": [ + "https://opendatacommons.org/licenses/by/1.0/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/copyleft-next-0.3.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/copyleft-next-0.3.0.json", + "referenceNumber": 415, + "name": "copyleft-next 0.3.0", + "licenseId": "copyleft-next-0.3.0", + "seeAlso": [ + "https://github.com/copyleft-next/copyleft-next/blob/master/Releases/copyleft-next-0.3.0" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-ND-4.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-4.0.json", + "referenceNumber": 416, + "name": "Creative Commons Attribution No Derivatives 4.0 International", + "licenseId": "CC-BY-ND-4.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd/4.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/ZPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ZPL-1.1.json", + "referenceNumber": 417, + "name": "Zope Public License 1.1", + "licenseId": "ZPL-1.1", + "seeAlso": [ + "http://old.zope.org/Resources/License/ZPL-1.1" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.3-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.3-or-later.json", + "referenceNumber": 418, + "name": "GNU Free Documentation License v1.3 or later", + "licenseId": "GFDL-1.3-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/fdl-1.3.txt" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CATOSL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CATOSL-1.1.json", + "referenceNumber": 419, + "name": "Computer Associates Trusted Open Source License 1.1", + "licenseId": "CATOSL-1.1", + "seeAlso": [ + "https://opensource.org/licenses/CATOSL-1.1" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/GPL-2.0-with-classpath-exception.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/GPL-2.0-with-classpath-exception.json", + "referenceNumber": 420, + "name": "GNU General Public License v2.0 w/Classpath exception", + "licenseId": "GPL-2.0-with-classpath-exception", + "seeAlso": [ + "https://www.gnu.org/software/classpath/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/LGPL-2.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/LGPL-2.0.json", + "referenceNumber": 421, + "name": "GNU Library General Public License v2 only", + "licenseId": "LGPL-2.0", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/lgpl-2.0-standalone.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/BSD-2-Clause-Views.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-2-Clause-Views.json", + "referenceNumber": 422, + "name": "BSD 2-Clause with views sentence", + "licenseId": "BSD-2-Clause-Views", + "seeAlso": [ + "http://www.freebsd.org/copyright/freebsd-license.html", + "https://people.freebsd.org/~ivoras/wine/patch-wine-nvidia.sh", + "https://github.com/protegeproject/protege/blob/master/license.txt" + ], + "isOsiApproved": false + }, + { + "reference": 
"https://spdx.org/licenses/BSL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSL-1.0.json", + "referenceNumber": 423, + "name": "Boost Software License 1.0", + "licenseId": "BSL-1.0", + "seeAlso": [ + "http://www.boost.org/LICENSE_1_0.txt", + "https://opensource.org/licenses/BSL-1.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CNRI-Jython.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CNRI-Jython.json", + "referenceNumber": 424, + "name": "CNRI Jython License", + "licenseId": "CNRI-Jython", + "seeAlso": [ + "http://www.jython.org/license.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Eurosym.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Eurosym.json", + "referenceNumber": 425, + "name": "Eurosym License", + "licenseId": "Eurosym", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Eurosym" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-SA-3.0-AT.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-SA-3.0-AT.json", + "referenceNumber": 426, + "name": "Creative Commons Attribution Share Alike 3.0 Austria", + "licenseId": "CC-BY-SA-3.0-AT", + "seeAlso": [ + "https://creativecommons.org/licenses/by-sa/3.0/at/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CECILL-C.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CECILL-C.json", + "referenceNumber": 427, + "name": "CeCILL-C Free Software License Agreement", + "licenseId": "CECILL-C", + "seeAlso": [ + "http://www.cecill.info/licences/Licence_CeCILL-C_V1-en.html" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/EPICS.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/EPICS.json", + "referenceNumber": 428, + "name": "EPICS Open License", + "licenseId": "EPICS", + "seeAlso": [ + "https://epics.anl.gov/license/open.php" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-ND-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-ND-2.0.json", + "referenceNumber": 429, + "name": "Creative Commons Attribution Non Commercial No Derivatives 2.0 Generic", + "licenseId": "CC-BY-NC-ND-2.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-nd/2.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GD.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GD.json", + "referenceNumber": 430, + "name": "GD License", + "licenseId": "GD", + "seeAlso": [ + "https://libgd.github.io/manuals/2.3.0/files/license-txt.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/X11.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/X11.json", + "referenceNumber": 431, + "name": "X11 License", + "licenseId": "X11", + "seeAlso": [ + "http://www.xfree86.org/3.3.6/COPYRIGHT2.html#3" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/MPL-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MPL-1.1.json", + "referenceNumber": 432, + "name": "Mozilla Public License 1.1", + "licenseId": "MPL-1.1", + "seeAlso": [ + 
"http://www.mozilla.org/MPL/MPL-1.1.html", + "https://opensource.org/licenses/MPL-1.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.1-invariants-only.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.1-invariants-only.json", + "referenceNumber": 433, + "name": "GNU Free Documentation License v1.1 only - invariants", + "licenseId": "GFDL-1.1-invariants-only", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.1.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/psfrag.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/psfrag.json", + "referenceNumber": 434, + "name": "psfrag License", + "licenseId": "psfrag", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/psfrag" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/RSCPL.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/RSCPL.json", + "referenceNumber": 435, + "name": "Ricoh Source Code Public License", + "licenseId": "RSCPL", + "seeAlso": [ + "http://wayback.archive.org/web/20060715140826/http://www.risource.org/RPL/RPL-1.0A.shtml", + "https://opensource.org/licenses/RSCPL" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/YPL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/YPL-1.0.json", + "referenceNumber": 436, + "name": "Yahoo! Public License v1.0", + "licenseId": "YPL-1.0", + "seeAlso": [ + "http://www.zimbra.com/license/yahoo_public_license_1.0.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SGI-B-1.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SGI-B-1.1.json", + "referenceNumber": 437, + "name": "SGI Free Software License B v1.1", + "licenseId": "SGI-B-1.1", + "seeAlso": [ + "http://oss.sgi.com/projects/FreeB/" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-ND-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-ND-1.0.json", + "referenceNumber": 438, + "name": "Creative Commons Attribution No Derivatives 1.0 Generic", + "licenseId": "CC-BY-ND-1.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nd/1.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/SGI-B-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/SGI-B-2.0.json", + "referenceNumber": 439, + "name": "SGI Free Software License B v2.0", + "licenseId": "SGI-B-2.0", + "seeAlso": [ + "http://oss.sgi.com/projects/FreeB/SGIFreeSWLicB.2.0.pdf" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/APAFML.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/APAFML.json", + "referenceNumber": 440, + "name": "Adobe Postscript AFM License", + "licenseId": "APAFML", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/AdobePostscriptAFM" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Spencer-94.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Spencer-94.json", + "referenceNumber": 441, + "name": "Spencer License 94", + "licenseId": "Spencer-94", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/Henry_Spencer_Reg-Ex_Library_License" + ], + "isOsiApproved": 
false + }, + { + "reference": "https://spdx.org/licenses/ISC.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/ISC.json", + "referenceNumber": 442, + "name": "ISC License", + "licenseId": "ISC", + "seeAlso": [ + "https://www.isc.org/downloads/software-support-policy/isc-license/", + "https://opensource.org/licenses/ISC" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/MIT-advertising.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MIT-advertising.json", + "referenceNumber": 443, + "name": "Enlightenment License (e16)", + "licenseId": "MIT-advertising", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/MIT_With_Advertising" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/GFDL-1.2-invariants-or-later.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/GFDL-1.2-invariants-or-later.json", + "referenceNumber": 444, + "name": "GNU Free Documentation License v1.2 or later - invariants", + "licenseId": "GFDL-1.2-invariants-or-later", + "seeAlso": [ + "https://www.gnu.org/licenses/old-licenses/fdl-1.2.txt" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-NC-SA-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-NC-SA-2.5.json", + "referenceNumber": 445, + "name": "Creative Commons Attribution Non Commercial Share Alike 2.5 Generic", + "licenseId": "CC-BY-NC-SA-2.5", + "seeAlso": [ + "https://creativecommons.org/licenses/by-nc-sa/2.5/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/CC-BY-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CC-BY-1.0.json", + "referenceNumber": 446, + "name": "Creative Commons Attribution 1.0 Generic", + "licenseId": "CC-BY-1.0", + "seeAlso": [ + "https://creativecommons.org/licenses/by/1.0/legalcode" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/OSL-2.1.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OSL-2.1.json", + "referenceNumber": 447, + "name": "Open Software License 2.1", + "licenseId": "OSL-2.1", + "seeAlso": [ + "http://web.archive.org/web/20050212003940/http://www.rosenlaw.com/osl21.htm", + "https://opensource.org/licenses/OSL-2.1" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CrystalStacker.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CrystalStacker.json", + "referenceNumber": 448, + "name": "CrystalStacker License", + "licenseId": "CrystalStacker", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing:CrystalStacker?rd\u003dLicensing/CrystalStacker" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/FSFULLR.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/FSFULLR.json", + "referenceNumber": 449, + "name": "FSF Unlimited License (with License Retention)", + "licenseId": "FSFULLR", + "seeAlso": [ + "https://fedoraproject.org/wiki/Licensing/FSF_Unlimited_License#License_Retention_Variant" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/libselinux-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/libselinux-1.0.json", + "referenceNumber": 450, + "name": "libselinux public domain notice", + "licenseId": 
"libselinux-1.0", + "seeAlso": [ + "https://github.com/SELinuxProject/selinux/blob/master/libselinux/LICENSE" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/MulanPSL-2.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/MulanPSL-2.0.json", + "referenceNumber": 451, + "name": "Mulan Permissive Software License, Version 2", + "licenseId": "MulanPSL-2.0", + "seeAlso": [ + "https://license.coscl.org.cn/MulanPSL2/" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/LGPL-3.0.html", + "isDeprecatedLicenseId": true, + "detailsUrl": "https://spdx.org/licenses/LGPL-3.0.json", + "referenceNumber": 452, + "name": "GNU Lesser General Public License v3.0 only", + "licenseId": "LGPL-3.0", + "seeAlso": [ + "https://www.gnu.org/licenses/lgpl-3.0-standalone.html", + "https://opensource.org/licenses/LGPL-3.0" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/OLDAP-2.5.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/OLDAP-2.5.json", + "referenceNumber": 453, + "name": "Open LDAP Public License v2.5", + "licenseId": "OLDAP-2.5", + "seeAlso": [ + "http://www.openldap.org/devel/gitweb.cgi?p\u003dopenldap.git;a\u003dblob;f\u003dLICENSE;hb\u003d6852b9d90022e8593c98205413380536b1b5a7cf" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/Artistic-1.0-Perl.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Artistic-1.0-Perl.json", + "referenceNumber": 454, + "name": "Artistic License 1.0 (Perl)", + "licenseId": "Artistic-1.0-Perl", + "seeAlso": [ + "http://dev.perl.org/licenses/artistic.html" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/AFL-1.2.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/AFL-1.2.json", + "referenceNumber": 455, + "name": "Academic Free License v1.2", + "licenseId": "AFL-1.2", + "seeAlso": [ + "http://opensource.linux-mirror.org/licenses/afl-1.2.txt", + "http://wayback.archive.org/web/20021204204652/http://www.opensource.org/licenses/academic.php" + ], + "isOsiApproved": true, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/CAL-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/CAL-1.0.json", + "referenceNumber": 456, + "name": "Cryptographic Autonomy License 1.0", + "licenseId": "CAL-1.0", + "seeAlso": [ + "http://cryptographicautonomylicense.com/license-text.html", + "https://opensource.org/licenses/CAL-1.0" + ], + "isOsiApproved": true + }, + { + "reference": "https://spdx.org/licenses/BSD-4-Clause.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/BSD-4-Clause.json", + "referenceNumber": 457, + "name": "BSD 4-Clause \"Original\" or \"Old\" License", + "licenseId": "BSD-4-Clause", + "seeAlso": [ + "http://directory.fsf.org/wiki/License:BSD_4Clause" + ], + "isOsiApproved": false, + "isFsfLibre": true + }, + { + "reference": "https://spdx.org/licenses/Interbase-1.0.html", + "isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/Interbase-1.0.json", + "referenceNumber": 458, + "name": "Interbase Public License v1.0", + "licenseId": "Interbase-1.0", + "seeAlso": [ + "https://web.archive.org/web/20060319014854/http://info.borland.com/devsupport/interbase/opensource/IPL.html" + ], + "isOsiApproved": false + }, + { + "reference": "https://spdx.org/licenses/NPOSL-3.0.html", + 
"isDeprecatedLicenseId": false, + "detailsUrl": "https://spdx.org/licenses/NPOSL-3.0.json", + "referenceNumber": 459, + "name": "Non-Profit Open Software License 3.0", + "licenseId": "NPOSL-3.0", + "seeAlso": [ + "https://opensource.org/licenses/NOSL3.0" + ], + "isOsiApproved": true + } + ], + "releaseDate": "2021-05-20" +}
\ No newline at end of file diff --git a/test/schemas/f b/test/schemas/f new file mode 120000 index 0000000..ae8ff29 --- /dev/null +++ b/test/schemas/f @@ -0,0 +1 @@ +../../src/ansiblelint/schemas
\ No newline at end of file diff --git a/test/schemas/negative_test/changelogs/invalid-date/changelogs/changelog.yaml b/test/schemas/negative_test/changelogs/invalid-date/changelogs/changelog.yaml new file mode 100644 index 0000000..2639e9a --- /dev/null +++ b/test/schemas/negative_test/changelogs/invalid-date/changelogs/changelog.yaml @@ -0,0 +1,4 @@ +--- +releases: + 1.0.0: + release_date: 01-01-2020 # invalid date format, must be ISO-8601 ! diff --git a/test/schemas/negative_test/changelogs/invalid-date/changelogs/changelog.yaml.md b/test/schemas/negative_test/changelogs/invalid-date/changelogs/changelog.yaml.md new file mode 100644 index 0000000..72b4f96 --- /dev/null +++ b/test/schemas/negative_test/changelogs/invalid-date/changelogs/changelog.yaml.md @@ -0,0 +1,40 @@ +# ajv errors + +```json +[ + { + "instancePath": "/releases/1.0.0/release_date", + "keyword": "pattern", + "message": "must match pattern \"\\d\\d\\d\\d-\\d\\d-\\d\\d\"", + "params": { + "pattern": "\\d\\d\\d\\d-\\d\\d-\\d\\d" + }, + "schemaPath": "#/properties/release_date/pattern" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/changelogs/invalid-date/changelogs/changelog.yaml", + "path": "$.releases.1.0.0.release_date", + "message": "'01-01-2020' is not a 'date'", + "has_sub_errors": false + }, + { + "filename": "negative_test/changelogs/invalid-date/changelogs/changelog.yaml", + "path": "$.releases.1.0.0.release_date", + "message": "'01-01-2020' does not match '\\\\d\\\\d\\\\d\\\\d-\\\\d\\\\d-\\\\d\\\\d'", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/changelogs/invalid-plugin-namespace/changelogs/changelog.yaml b/test/schemas/negative_test/changelogs/invalid-plugin-namespace/changelogs/changelog.yaml new file mode 100644 index 0000000..99632a4 --- /dev/null +++ b/test/schemas/negative_test/changelogs/invalid-plugin-namespace/changelogs/changelog.yaml @@ -0,0 +1,8 @@ +--- +releases: + 1.0.0: + plugins: + lookup: + - name: reverse + description: Reverse magic + namespace: "foo" # namespace must be null for plugins and objects diff --git a/test/schemas/negative_test/changelogs/invalid-plugin-namespace/changelogs/changelog.yaml.md b/test/schemas/negative_test/changelogs/invalid-plugin-namespace/changelogs/changelog.yaml.md new file mode 100644 index 0000000..ef847c3 --- /dev/null +++ b/test/schemas/negative_test/changelogs/invalid-plugin-namespace/changelogs/changelog.yaml.md @@ -0,0 +1,34 @@ +# ajv errors + +```json +[ + { + "instancePath": "/releases/1.0.0/plugins/lookup/0/namespace", + "keyword": "type", + "message": "must be null", + "params": { + "type": "null" + }, + "schemaPath": "#/$defs/plugin-descriptions/items/properties/namespace/type" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/changelogs/invalid-plugin-namespace/changelogs/changelog.yaml", + "path": "$.releases.1.0.0.plugins.lookup[0].namespace", + "message": "'foo' is not of type 'null'", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/changelogs/list/changelogs/changelog.yaml b/test/schemas/negative_test/changelogs/list/changelogs/changelog.yaml new file mode 100644 index 0000000..72def5b --- /dev/null +++ b/test/schemas/negative_test/changelogs/list/changelogs/changelog.yaml @@ -0,0 +1,4 @@ +--- +- this is invalid +- as changelog must be object (mapping) +- not an array (sequence) 
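The negative-test fixtures above each pair an intentionally invalid input file with a companion `.md` file recording the errors ajv and check-jsonschema are expected to emit. As a rough, hedged illustration of how such a failure can be reproduced outside the test harness, the sketch below validates the `changelogs/list` fixture with the `python-jsonschema` package; the schema path, the choice of validator draft, and the use of `jsonschema`/`PyYAML` are assumptions for illustration, not part of this change.

```python
# Minimal sketch (assumed tooling): reproduce a negative-test failure by
# validating a fixture against a JSON Schema with python-jsonschema.
import json

import yaml
from jsonschema import Draft202012Validator  # validator draft is an assumption

# Hypothetical paths, mirroring the layout added in this change.
SCHEMA = "src/ansiblelint/schemas/changelog.json"
FIXTURE = "test/schemas/negative_test/changelogs/list/changelogs/changelog.yaml"

with open(SCHEMA, encoding="utf-8") as f:
    schema = json.load(f)
with open(FIXTURE, encoding="utf-8") as f:
    instance = yaml.safe_load(f)

# Collect every violation instead of stopping at the first one, similar to
# how the expected-output .md files list all ajv / check-jsonschema errors.
for error in Draft202012Validator(schema).iter_errors(instance):
    print(f"{list(error.absolute_path)}: {error.message}")
# For this particular fixture the only violation is at the document root:
# the file is a YAML sequence, while the schema requires an object (mapping).
```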
diff --git a/test/schemas/negative_test/changelogs/list/changelogs/changelog.yaml.md b/test/schemas/negative_test/changelogs/list/changelogs/changelog.yaml.md new file mode 100644 index 0000000..5938944 --- /dev/null +++ b/test/schemas/negative_test/changelogs/list/changelogs/changelog.yaml.md @@ -0,0 +1,34 @@ +# ajv errors + +```json +[ + { + "instancePath": "", + "keyword": "type", + "message": "must be object", + "params": { + "type": "object" + }, + "schemaPath": "#/type" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/changelogs/list/changelogs/changelog.yaml", + "path": "$", + "message": "['this is invalid', 'as changelog must be object (mapping)', 'not an array (sequence)'] is not of type 'object'", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/changelogs/no-semver/changelogs/changelog.yaml b/test/schemas/negative_test/changelogs/no-semver/changelogs/changelog.yaml new file mode 100644 index 0000000..d08ebd0 --- /dev/null +++ b/test/schemas/negative_test/changelogs/no-semver/changelogs/changelog.yaml @@ -0,0 +1,2 @@ +--- +releases: foo # <-- not a semver diff --git a/test/schemas/negative_test/changelogs/no-semver/changelogs/changelog.yaml.md b/test/schemas/negative_test/changelogs/no-semver/changelogs/changelog.yaml.md new file mode 100644 index 0000000..64c4665 --- /dev/null +++ b/test/schemas/negative_test/changelogs/no-semver/changelogs/changelog.yaml.md @@ -0,0 +1,34 @@ +# ajv errors + +```json +[ + { + "instancePath": "/releases", + "keyword": "type", + "message": "must be object", + "params": { + "type": "object" + }, + "schemaPath": "#/properties/releases/type" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/changelogs/no-semver/changelogs/changelog.yaml", + "path": "$.releases", + "message": "'foo' is not of type 'object'", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/changelogs/unknown-keys/changelogs/changelog.yaml b/test/schemas/negative_test/changelogs/unknown-keys/changelogs/changelog.yaml new file mode 100644 index 0000000..a97e4e2 --- /dev/null +++ b/test/schemas/negative_test/changelogs/unknown-keys/changelogs/changelog.yaml @@ -0,0 +1,2 @@ +--- +release: {} # <- unknown key, correct would be releases diff --git a/test/schemas/negative_test/changelogs/unknown-keys/changelogs/changelog.yaml.md b/test/schemas/negative_test/changelogs/unknown-keys/changelogs/changelog.yaml.md new file mode 100644 index 0000000..490bdbe --- /dev/null +++ b/test/schemas/negative_test/changelogs/unknown-keys/changelogs/changelog.yaml.md @@ -0,0 +1,34 @@ +# ajv errors + +```json +[ + { + "instancePath": "", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "release" + }, + "schemaPath": "#/additionalProperties" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/changelogs/unknown-keys/changelogs/changelog.yaml", + "path": "$", + "message": "Additional properties are not allowed ('release' was unexpected)", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/inventory/broken_dev_inventory.yml b/test/schemas/negative_test/inventory/broken_dev_inventory.yml new file mode 100644 index 0000000..ce84309 --- /dev/null +++ 
b/test/schemas/negative_test/inventory/broken_dev_inventory.yml @@ -0,0 +1,10 @@ +--- +# See https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html +ungrouped: {} +all: + hosts: + mail.example.com: + children: + foo: {} # <-- invalid based on inventory json schema + vars: {} +webservers: {} diff --git a/test/schemas/negative_test/inventory/broken_dev_inventory.yml.md b/test/schemas/negative_test/inventory/broken_dev_inventory.yml.md new file mode 100644 index 0000000..d4fefaf --- /dev/null +++ b/test/schemas/negative_test/inventory/broken_dev_inventory.yml.md @@ -0,0 +1,34 @@ +# ajv errors + +```json +[ + { + "instancePath": "/all", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "foo" + }, + "schemaPath": "#/$defs/special-group/additionalProperties" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/inventory/broken_dev_inventory.yml", + "path": "$.all", + "message": "Additional properties are not allowed ('foo' was unexpected)", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/meta/runtime.yml b/test/schemas/negative_test/meta/runtime.yml new file mode 100644 index 0000000..c143dc6 --- /dev/null +++ b/test/schemas/negative_test/meta/runtime.yml @@ -0,0 +1 @@ +requires_ansible: ">= 2.12" # invalid as space is not allowed! diff --git a/test/schemas/negative_test/meta/runtime.yml.md b/test/schemas/negative_test/meta/runtime.yml.md new file mode 100644 index 0000000..761fa6f --- /dev/null +++ b/test/schemas/negative_test/meta/runtime.yml.md @@ -0,0 +1,34 @@ +# ajv errors + +```json +[ + { + "instancePath": "/requires_ansible", + "keyword": "pattern", + "message": "must match pattern \"^[^\\s]*$\"", + "params": { + "pattern": "^[^\\s]*$" + }, + "schemaPath": "#/properties/requires_ansible/pattern" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/meta/runtime.yml", + "path": "$.requires_ansible", + "message": "'>= 2.12' does not match '^[^\\\\s]*$'", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/molecule/platforms_children/molecule.yml b/test/schemas/negative_test/molecule/platforms_children/molecule.yml new file mode 100644 index 0000000..6800584 --- /dev/null +++ b/test/schemas/negative_test/molecule/platforms_children/molecule.yml @@ -0,0 +1,5 @@ +driver: + name: delegated +platforms: + - name: foo + children: 2 # invalid, must be list of strings diff --git a/test/schemas/negative_test/molecule/platforms_children/molecule.yml.md b/test/schemas/negative_test/molecule/platforms_children/molecule.yml.md new file mode 100644 index 0000000..68e09eb --- /dev/null +++ b/test/schemas/negative_test/molecule/platforms_children/molecule.yml.md @@ -0,0 +1,34 @@ +# ajv errors + +```json +[ + { + "instancePath": "/platforms/0/children", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/properties/children/type" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/molecule/platforms_children/molecule.yml", + "path": "$.platforms[0].children", + "message": "2 is not of type 'array'", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git 
a/test/schemas/negative_test/molecule/platforms_networks/molecule.yml b/test/schemas/negative_test/molecule/platforms_networks/molecule.yml new file mode 100644 index 0000000..4ae9799 --- /dev/null +++ b/test/schemas/negative_test/molecule/platforms_networks/molecule.yml @@ -0,0 +1,7 @@ +driver: + name: docker +platforms: + - name: docker + networks: # invalid, must be list of dictionaries + - foo + - bar diff --git a/test/schemas/negative_test/molecule/platforms_networks/molecule.yml.md b/test/schemas/negative_test/molecule/platforms_networks/molecule.yml.md new file mode 100644 index 0000000..74b8de7 --- /dev/null +++ b/test/schemas/negative_test/molecule/platforms_networks/molecule.yml.md @@ -0,0 +1,49 @@ +# ajv errors + +```json +[ + { + "instancePath": "/platforms/0/networks/0", + "keyword": "type", + "message": "must be object", + "params": { + "type": "object" + }, + "schemaPath": "#/$defs/platform-network/type" + }, + { + "instancePath": "/platforms/0/networks/1", + "keyword": "type", + "message": "must be object", + "params": { + "type": "object" + }, + "schemaPath": "#/$defs/platform-network/type" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/molecule/platforms_networks/molecule.yml", + "path": "$.platforms[0].networks[0]", + "message": "'foo' is not of type 'object'", + "has_sub_errors": false + }, + { + "filename": "negative_test/molecule/platforms_networks/molecule.yml", + "path": "$.platforms[0].networks[1]", + "message": "'bar' is not of type 'object'", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/environment.yml b/test/schemas/negative_test/playbooks/environment.yml new file mode 100644 index 0000000..2064aca --- /dev/null +++ b/test/schemas/negative_test/playbooks/environment.yml @@ -0,0 +1,3 @@ +--- +- hosts: localhost + environment: "{{ foo }}-123" # <- invalid only a full jinja string is allowed, or a list of strings diff --git a/test/schemas/negative_test/playbooks/environment.yml.md b/test/schemas/negative_test/playbooks/environment.yml.md new file mode 100644 index 0000000..8923cb3 --- /dev/null +++ b/test/schemas/negative_test/playbooks/environment.yml.md @@ -0,0 +1,138 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "environment" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/environment", + "keyword": "type", + "message": "must be object", + "params": { + "type": "object" + }, + "schemaPath": "#/anyOf/0/type" + }, 
+ { + "instancePath": "/0/environment", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/environment", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/environment.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'environment': '{{ foo }}-123'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'environment', 'hosts' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'environment', 'hosts' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'environment': '{{ foo }}-123'} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].environment", + "message": "'{{ foo }}-123' is not valid under any of the given schemas" + }, + { + "path": "$[0].environment", + "message": "'{{ foo }}-123' is not of type 'object'" + }, + { + "path": "$[0].environment", + "message": "'{{ foo }}-123' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/failed_when.yml b/test/schemas/negative_test/playbooks/failed_when.yml new file mode 100644 index 0000000..59b7272 --- /dev/null +++ b/test/schemas/negative_test/playbooks/failed_when.yml @@ -0,0 +1,6 @@ +- hosts: localhost + tasks: + - name: foo + ansible.builtin.debug: + msg: foo! 
+ failed_when: 123 # <- not ok diff --git a/test/schemas/negative_test/playbooks/failed_when.yml.md b/test/schemas/negative_test/playbooks/failed_when.yml.md new file mode 100644 index 0000000..e843e1f --- /dev/null +++ b/test/schemas/negative_test/playbooks/failed_when.yml.md @@ -0,0 +1,177 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/tasks/0/failed_when", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/0/type" + }, + { + "instancePath": "/0/tasks/0/failed_when", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/1/type" + }, + { + "instancePath": "/0/tasks/0/failed_when", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/2/type" + }, + { + "instancePath": "/0/tasks/0/failed_when", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/$defs/complex_conditional/oneOf" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/failed_when.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'name': 'foo', 'ansible.builtin.debug': {'msg': 'foo!'}, 'failed_when': 123}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 
'localhost', 'tasks': [{'name': 'foo', 'ansible.builtin.debug': {'msg': 'foo!'}, 'failed_when': 123}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].tasks[0]", + "message": "{'name': 'foo', 'ansible.builtin.debug': {'msg': 'foo!'}, 'failed_when': 123} is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].tasks[0].failed_when", + "message": "123 is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].failed_when", + "message": "123 is not of type 'boolean'" + }, + { + "path": "$[0].tasks[0].failed_when", + "message": "123 is not of type 'string'" + }, + { + "path": "$[0].tasks[0].failed_when", + "message": "123 is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/gather_facts.yml b/test/schemas/negative_test/playbooks/gather_facts.yml new file mode 100644 index 0000000..d1b1345 --- /dev/null +++ b/test/schemas/negative_test/playbooks/gather_facts.yml @@ -0,0 +1,6 @@ +--- +- hosts: localhost + gather_facts: non + tasks: + - ansible.builtin.debug: + msg: foo diff --git a/test/schemas/negative_test/playbooks/gather_facts.yml.md b/test/schemas/negative_test/playbooks/gather_facts.yml.md new file mode 100644 index 0000000..0eb3a4b --- /dev/null +++ b/test/schemas/negative_test/playbooks/gather_facts.yml.md @@ -0,0 +1,123 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "gather_facts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/gather_facts", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/properties/gather_facts/type" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/gather_facts.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'gather_facts': 'non', 
'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'gather_facts', 'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'gather_facts', 'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'gather_facts': 'non', 'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].gather_facts", + "message": "'non' is not of type 'boolean'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/gather_subset.yml b/test/schemas/negative_test/playbooks/gather_subset.yml new file mode 100644 index 0000000..455d683 --- /dev/null +++ b/test/schemas/negative_test/playbooks/gather_subset.yml @@ -0,0 +1,6 @@ +--- +- hosts: localhost + gather_subset: all + tasks: + - ansible.builtin.debug: + msg: foo diff --git a/test/schemas/negative_test/playbooks/gather_subset.yml.md b/test/schemas/negative_test/playbooks/gather_subset.yml.md new file mode 100644 index 0000000..b426a23 --- /dev/null +++ b/test/schemas/negative_test/playbooks/gather_subset.yml.md @@ -0,0 +1,123 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "gather_subset" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/gather_subset", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/properties/gather_subset/type" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/gather_subset.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 
'gather_subset': 'all', 'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'gather_subset', 'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'gather_subset', 'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'gather_subset': 'all', 'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].gather_subset", + "message": "'all' is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/gather_subset2.yml b/test/schemas/negative_test/playbooks/gather_subset2.yml new file mode 100644 index 0000000..d5a39ae --- /dev/null +++ b/test/schemas/negative_test/playbooks/gather_subset2.yml @@ -0,0 +1,7 @@ +--- +- hosts: localhost + gather_subset: + - invalid + tasks: + - ansible.builtin.debug: + msg: foo diff --git a/test/schemas/negative_test/playbooks/gather_subset2.yml.md b/test/schemas/negative_test/playbooks/gather_subset2.yml.md new file mode 100644 index 0000000..8d6be68 --- /dev/null +++ b/test/schemas/negative_test/playbooks/gather_subset2.yml.md @@ -0,0 +1,277 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "gather_subset" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/gather_subset/0", + "keyword": "enum", + "message": "must be equal to one of the allowed values", + "params": { + "allowedValues": [ + "all", + "min", + "all_ipv4_addresses", + "all_ipv6_addresses", + "apparmor", + "architecture", + "caps", + "chroot,cmdline", + "date_time", + "default_ipv4", + "default_ipv6", + "devices", + "distribution", + "distribution_major_version", + "distribution_release", + "distribution_version", + "dns", + "effective_group_ids", + "effective_user_id", + "env", + "facter", + "fips", + 
"hardware", + "interfaces", + "is_chroot", + "iscsi", + "kernel", + "local", + "lsb", + "machine", + "machine_id", + "mounts", + "network", + "ohai", + "os_family", + "pkg_mgr", + "platform", + "processor", + "processor_cores", + "processor_count", + "python", + "python_version", + "real_user_id", + "selinux", + "service_mgr", + "ssh_host_key_dsa_public", + "ssh_host_key_ecdsa_public", + "ssh_host_key_ed25519_public", + "ssh_host_key_rsa_public", + "ssh_host_pub_keys", + "ssh_pub_keys", + "system", + "system_capabilities", + "system_capabilities_enforced", + "user", + "user_dir", + "user_gecos", + "user_gid", + "user_id", + "user_shell", + "user_uid", + "virtual", + "virtualization_role", + "virtualization_type" + ] + }, + "schemaPath": "#/properties/gather_subset/items/anyOf/0/enum" + }, + { + "instancePath": "/0/gather_subset/0", + "keyword": "enum", + "message": "must be equal to one of the allowed values", + "params": { + "allowedValues": [ + "!all", + "!min", + "!all_ipv4_addresses", + "!all_ipv6_addresses", + "!apparmor", + "!architecture", + "!caps", + "!chroot,cmdline", + "!date_time", + "!default_ipv4", + "!default_ipv6", + "!devices", + "!distribution", + "!distribution_major_version", + "!distribution_release", + "!distribution_version", + "!dns", + "!effective_group_ids", + "!effective_user_id", + "!env", + "!facter", + "!fips", + "!hardware", + "!interfaces", + "!is_chroot", + "!iscsi", + "!kernel", + "!local", + "!lsb", + "!machine", + "!machine_id", + "!mounts", + "!network", + "!ohai", + "!os_family", + "!pkg_mgr", + "!platform", + "!processor", + "!processor_cores", + "!processor_count", + "!python", + "!python_version", + "!real_user_id", + "!selinux", + "!service_mgr", + "!ssh_host_key_dsa_public", + "!ssh_host_key_ecdsa_public", + "!ssh_host_key_ed25519_public", + "!ssh_host_key_rsa_public", + "!ssh_host_pub_keys", + "!ssh_pub_keys", + "!system", + "!system_capabilities", + "!system_capabilities_enforced", + "!user", + "!user_dir", + "!user_gecos", + "!user_gid", + "!user_id", + "!user_shell", + "!user_uid", + "!virtual", + "!virtualization_role", + "!virtualization_type" + ] + }, + "schemaPath": "#/properties/gather_subset/items/anyOf/1/enum" + }, + { + "instancePath": "/0/gather_subset/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/properties/gather_subset/items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/gather_subset2.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'gather_subset': ['invalid'], 'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'gather_subset', 'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'gather_subset', 'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'gather_subset': ['invalid'], 'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}}]} is not valid under any of the given schemas" + }, + { + 
"path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].gather_subset[0]", + "message": "'invalid' is not valid under any of the given schemas" + }, + { + "path": "$[0].gather_subset[0]", + "message": "'invalid' is not one of ['all', 'min', 'all_ipv4_addresses', 'all_ipv6_addresses', 'apparmor', 'architecture', 'caps', 'chroot,cmdline', 'date_time', 'default_ipv4', 'default_ipv6', 'devices', 'distribution', 'distribution_major_version', 'distribution_release', 'distribution_version', 'dns', 'effective_group_ids', 'effective_user_id', 'env', 'facter', 'fips', 'hardware', 'interfaces', 'is_chroot', 'iscsi', 'kernel', 'local', 'lsb', 'machine', 'machine_id', 'mounts', 'network', 'ohai', 'os_family', 'pkg_mgr', 'platform', 'processor', 'processor_cores', 'processor_count', 'python', 'python_version', 'real_user_id', 'selinux', 'service_mgr', 'ssh_host_key_dsa_public', 'ssh_host_key_ecdsa_public', 'ssh_host_key_ed25519_public', 'ssh_host_key_rsa_public', 'ssh_host_pub_keys', 'ssh_pub_keys', 'system', 'system_capabilities', 'system_capabilities_enforced', 'user', 'user_dir', 'user_gecos', 'user_gid', 'user_id', 'user_shell', 'user_uid', 'virtual', 'virtualization_role', 'virtualization_type']" + }, + { + "path": "$[0].gather_subset[0]", + "message": "'invalid' is not one of ['!all', '!min', '!all_ipv4_addresses', '!all_ipv6_addresses', '!apparmor', '!architecture', '!caps', '!chroot,cmdline', '!date_time', '!default_ipv4', '!default_ipv6', '!devices', '!distribution', '!distribution_major_version', '!distribution_release', '!distribution_version', '!dns', '!effective_group_ids', '!effective_user_id', '!env', '!facter', '!fips', '!hardware', '!interfaces', '!is_chroot', '!iscsi', '!kernel', '!local', '!lsb', '!machine', '!machine_id', '!mounts', '!network', '!ohai', '!os_family', '!pkg_mgr', '!platform', '!processor', '!processor_cores', '!processor_count', '!python', '!python_version', '!real_user_id', '!selinux', '!service_mgr', '!ssh_host_key_dsa_public', '!ssh_host_key_ecdsa_public', '!ssh_host_key_ed25519_public', '!ssh_host_key_rsa_public', '!ssh_host_pub_keys', '!ssh_pub_keys', '!system', '!system_capabilities', '!system_capabilities_enforced', '!user', '!user_dir', '!user_gecos', '!user_gid', '!user_id', '!user_shell', '!user_uid', '!virtual', '!virtualization_role', '!virtualization_type']" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/gather_subset3.yml b/test/schemas/negative_test/playbooks/gather_subset3.yml new file mode 100644 index 0000000..05e4028 --- /dev/null +++ b/test/schemas/negative_test/playbooks/gather_subset3.yml @@ -0,0 +1,7 @@ +--- +- hosts: localhost + gather_subset: + - 1 + tasks: + - ansible.builtin.debug: + msg: foo diff --git a/test/schemas/negative_test/playbooks/gather_subset3.yml.md b/test/schemas/negative_test/playbooks/gather_subset3.yml.md new file mode 100644 index 0000000..7dc1b13 --- /dev/null +++ b/test/schemas/negative_test/playbooks/gather_subset3.yml.md @@ -0,0 +1,303 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": 
{ + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "gather_subset" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/gather_subset/0", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/properties/gather_subset/items/anyOf/0/type" + }, + { + "instancePath": "/0/gather_subset/0", + "keyword": "enum", + "message": "must be equal to one of the allowed values", + "params": { + "allowedValues": [ + "all", + "min", + "all_ipv4_addresses", + "all_ipv6_addresses", + "apparmor", + "architecture", + "caps", + "chroot,cmdline", + "date_time", + "default_ipv4", + "default_ipv6", + "devices", + "distribution", + "distribution_major_version", + "distribution_release", + "distribution_version", + "dns", + "effective_group_ids", + "effective_user_id", + "env", + "facter", + "fips", + "hardware", + "interfaces", + "is_chroot", + "iscsi", + "kernel", + "local", + "lsb", + "machine", + "machine_id", + "mounts", + "network", + "ohai", + "os_family", + "pkg_mgr", + "platform", + "processor", + "processor_cores", + "processor_count", + "python", + "python_version", + "real_user_id", + "selinux", + "service_mgr", + "ssh_host_key_dsa_public", + "ssh_host_key_ecdsa_public", + "ssh_host_key_ed25519_public", + "ssh_host_key_rsa_public", + "ssh_host_pub_keys", + "ssh_pub_keys", + "system", + "system_capabilities", + "system_capabilities_enforced", + "user", + "user_dir", + "user_gecos", + "user_gid", + "user_id", + "user_shell", + "user_uid", + "virtual", + "virtualization_role", + "virtualization_type" + ] + }, + "schemaPath": "#/properties/gather_subset/items/anyOf/0/enum" + }, + { + "instancePath": "/0/gather_subset/0", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/properties/gather_subset/items/anyOf/1/type" + }, + { + "instancePath": "/0/gather_subset/0", + "keyword": "enum", + "message": "must be equal to one of the allowed values", + "params": { + "allowedValues": [ + "!all", + "!min", + "!all_ipv4_addresses", + "!all_ipv6_addresses", + "!apparmor", + "!architecture", + "!caps", + "!chroot,cmdline", + "!date_time", + "!default_ipv4", + "!default_ipv6", + "!devices", + "!distribution", + "!distribution_major_version", + "!distribution_release", + "!distribution_version", + "!dns", + "!effective_group_ids", + "!effective_user_id", + "!env", + "!facter", + "!fips", + "!hardware", + "!interfaces", + "!is_chroot", + "!iscsi", + "!kernel", + "!local", + "!lsb", + "!machine", + "!machine_id", + "!mounts", + "!network", + "!ohai", + "!os_family", + "!pkg_mgr", + "!platform", + "!processor", + "!processor_cores", + "!processor_count", + "!python", + "!python_version", + "!real_user_id", + 
"!selinux", + "!service_mgr", + "!ssh_host_key_dsa_public", + "!ssh_host_key_ecdsa_public", + "!ssh_host_key_ed25519_public", + "!ssh_host_key_rsa_public", + "!ssh_host_pub_keys", + "!ssh_pub_keys", + "!system", + "!system_capabilities", + "!system_capabilities_enforced", + "!user", + "!user_dir", + "!user_gecos", + "!user_gid", + "!user_id", + "!user_shell", + "!user_uid", + "!virtual", + "!virtualization_role", + "!virtualization_type" + ] + }, + "schemaPath": "#/properties/gather_subset/items/anyOf/1/enum" + }, + { + "instancePath": "/0/gather_subset/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/properties/gather_subset/items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/gather_subset3.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'gather_subset': [1], 'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'gather_subset', 'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'gather_subset', 'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'gather_subset': [1], 'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].gather_subset[0]", + "message": "1 is not valid under any of the given schemas" + }, + { + "path": "$[0].gather_subset[0]", + "message": "1 is not one of ['all', 'min', 'all_ipv4_addresses', 'all_ipv6_addresses', 'apparmor', 'architecture', 'caps', 'chroot,cmdline', 'date_time', 'default_ipv4', 'default_ipv6', 'devices', 'distribution', 'distribution_major_version', 'distribution_release', 'distribution_version', 'dns', 'effective_group_ids', 'effective_user_id', 'env', 'facter', 'fips', 'hardware', 'interfaces', 'is_chroot', 'iscsi', 'kernel', 'local', 'lsb', 'machine', 'machine_id', 'mounts', 'network', 'ohai', 'os_family', 'pkg_mgr', 'platform', 'processor', 'processor_cores', 'processor_count', 'python', 'python_version', 'real_user_id', 'selinux', 'service_mgr', 'ssh_host_key_dsa_public', 'ssh_host_key_ecdsa_public', 'ssh_host_key_ed25519_public', 'ssh_host_key_rsa_public', 'ssh_host_pub_keys', 'ssh_pub_keys', 'system', 'system_capabilities', 'system_capabilities_enforced', 'user', 'user_dir', 'user_gecos', 'user_gid', 'user_id', 'user_shell', 'user_uid', 'virtual', 'virtualization_role', 'virtualization_type']" + }, + { + "path": "$[0].gather_subset[0]", + "message": "1 is not of type 'string'" + }, + { + "path": "$[0].gather_subset[0]", + "message": "1 is not one of ['!all', '!min', '!all_ipv4_addresses', '!all_ipv6_addresses', '!apparmor', '!architecture', '!caps', '!chroot,cmdline', '!date_time', '!default_ipv4', '!default_ipv6', '!devices', '!distribution', 
'!distribution_major_version', '!distribution_release', '!distribution_version', '!dns', '!effective_group_ids', '!effective_user_id', '!env', '!facter', '!fips', '!hardware', '!interfaces', '!is_chroot', '!iscsi', '!kernel', '!local', '!lsb', '!machine', '!machine_id', '!mounts', '!network', '!ohai', '!os_family', '!pkg_mgr', '!platform', '!processor', '!processor_cores', '!processor_count', '!python', '!python_version', '!real_user_id', '!selinux', '!service_mgr', '!ssh_host_key_dsa_public', '!ssh_host_key_ecdsa_public', '!ssh_host_key_ed25519_public', '!ssh_host_key_rsa_public', '!ssh_host_pub_keys', '!ssh_pub_keys', '!system', '!system_capabilities', '!system_capabilities_enforced', '!user', '!user_dir', '!user_gecos', '!user_gid', '!user_id', '!user_shell', '!user_uid', '!virtual', '!virtualization_role', '!virtualization_type']" + }, + { + "path": "$[0].gather_subset[0]", + "message": "1 is not of type 'string'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/gather_subset4.yml b/test/schemas/negative_test/playbooks/gather_subset4.yml new file mode 100644 index 0000000..816e666 --- /dev/null +++ b/test/schemas/negative_test/playbooks/gather_subset4.yml @@ -0,0 +1,6 @@ +--- +- hosts: localhost + gather_subset: 1 + tasks: + - ansible.builtin.debug: + msg: foo diff --git a/test/schemas/negative_test/playbooks/gather_subset4.yml.md b/test/schemas/negative_test/playbooks/gather_subset4.yml.md new file mode 100644 index 0000000..ada01cb --- /dev/null +++ b/test/schemas/negative_test/playbooks/gather_subset4.yml.md @@ -0,0 +1,123 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "gather_subset" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/gather_subset", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/properties/gather_subset/type" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/gather_subset4.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'gather_subset': 1, 'tasks': 
[{'ansible.builtin.debug': {'msg': 'foo'}}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'gather_subset', 'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'gather_subset', 'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'gather_subset': 1, 'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].gather_subset", + "message": "1 is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/ignore_errors.yml b/test/schemas/negative_test/playbooks/ignore_errors.yml new file mode 100644 index 0000000..9da277f --- /dev/null +++ b/test/schemas/negative_test/playbooks/ignore_errors.yml @@ -0,0 +1,6 @@ +- hosts: localhost + tasks: + - command: echo 123 + vars: + should_ignore_errors: true + ignore_errors: should_ignore_errors # invalid due to missing {{ }} diff --git a/test/schemas/negative_test/playbooks/ignore_errors.yml.md b/test/schemas/negative_test/playbooks/ignore_errors.yml.md new file mode 100644 index 0000000..61c3116 --- /dev/null +++ b/test/schemas/negative_test/playbooks/ignore_errors.yml.md @@ -0,0 +1,203 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/tasks/0/ignore_errors", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/tasks/0/ignore_errors", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/tasks/0/ignore_errors", + "keyword": "oneOf", + "message": "must match exactly one schema in 
oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/tasks/0/ignore_errors", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/tasks/0/ignore_errors", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/tasks/0/ignore_errors", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/ignore_errors.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'command': 'echo 123', 'vars': {'should_ignore_errors': True}, 'ignore_errors': 'should_ignore_errors'}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'command': 'echo 123', 'vars': {'should_ignore_errors': True}, 'ignore_errors': 'should_ignore_errors'}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].tasks[0]", + "message": "{'command': 'echo 123', 'vars': {'should_ignore_errors': True}, 'ignore_errors': 'should_ignore_errors'} is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].ignore_errors", + "message": "'should_ignore_errors' is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].ignore_errors", + "message": "'should_ignore_errors' is not of type 'boolean'" + }, + { + "path": "$[0].tasks[0].ignore_errors", + "message": "'should_ignore_errors' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + }, + { + "path": "$[0].tasks[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].tasks[0].ignore_errors", + "message": "'should_ignore_errors' is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].ignore_errors", + "message": "'should_ignore_errors' is not of type 'boolean'" + }, + { + "path": "$[0].tasks[0].ignore_errors", + "message": "'should_ignore_errors' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/import_playbook.yml b/test/schemas/negative_test/playbooks/import_playbook.yml new file mode 100644 index 0000000..b6d8ec2 --- /dev/null +++ 
b/test/schemas/negative_test/playbooks/import_playbook.yml @@ -0,0 +1 @@ +- ansible.builtin.import_playbook: {} # only freeform/string is allowed diff --git a/test/schemas/negative_test/playbooks/import_playbook.yml.md b/test/schemas/negative_test/playbooks/import_playbook.yml.md new file mode 100644 index 0000000..def3dce --- /dev/null +++ b/test/schemas/negative_test/playbooks/import_playbook.yml.md @@ -0,0 +1,90 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0/ansible.builtin.import_playbook", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/patternProperties/%5E(ansible%5C.builtin%5C.)%3Fimport_playbook%24/type" + }, + { + "instancePath": "/0", + "keyword": "not", + "message": "must NOT be valid", + "params": {}, + "schemaPath": "#/allOf/0/not" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'hosts'", + "params": { + "missingProperty": "hosts" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/import_playbook.yml", + "path": "$[0]", + "message": "{'ansible.builtin.import_playbook': {}} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "{'ansible.builtin.import_playbook': {}} should not be valid under {'required': ['ansible.builtin.import_playbook']}" + }, + "sub_errors": [ + { + "path": "$[0].ansible.builtin.import_playbook", + "message": "{} is not of type 'string'" + }, + { + "path": "$[0]", + "message": "Additional properties are not allowed ('ansible.builtin.import_playbook' was unexpected)" + }, + { + "path": "$[0]", + "message": "{'ansible.builtin.import_playbook': {}} should not be valid under {'required': ['ansible.builtin.import_playbook']}" + }, + { + "path": "$[0]", + "message": "'hosts' is a required property" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/import_playbook_exclusive.yml b/test/schemas/negative_test/playbooks/import_playbook_exclusive.yml new file mode 100644 index 0000000..ef2b5f6 --- /dev/null +++ b/test/schemas/negative_test/playbooks/import_playbook_exclusive.yml @@ -0,0 +1,4 @@ +--- +# invalid because you cannot have both entries at the same time: +- ansible.builtin.import_playbook: foo.yml + import_playbook: other.yml diff --git a/test/schemas/negative_test/playbooks/import_playbook_exclusive.yml.md b/test/schemas/negative_test/playbooks/import_playbook_exclusive.yml.md new file mode 100644 index 0000000..184a434 --- /dev/null +++ b/test/schemas/negative_test/playbooks/import_playbook_exclusive.yml.md @@ -0,0 +1,132 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "not", + "message": "must NOT be valid", + "params": {}, + "schemaPath": "#/oneOf/0/not" + }, + { + "instancePath": "/0", + "keyword": "not", + "message": "must NOT be valid", + "params": {}, + "schemaPath": "#/oneOf/1/not" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must 
match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "not", + "message": "must NOT be valid", + "params": {}, + "schemaPath": "#/allOf/0/not" + }, + { + "instancePath": "/0", + "keyword": "not", + "message": "must NOT be valid", + "params": {}, + "schemaPath": "#/allOf/1/not" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'hosts'", + "params": { + "missingProperty": "hosts" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "import_playbook" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/import_playbook_exclusive.yml", + "path": "$[0]", + "message": "{'ansible.builtin.import_playbook': 'foo.yml', 'import_playbook': 'other.yml'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "{'ansible.builtin.import_playbook': 'foo.yml', 'import_playbook': 'other.yml'} should not be valid under {'required': ['ansible.builtin.import_playbook']}" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "{'ansible.builtin.import_playbook': 'foo.yml', 'import_playbook': 'other.yml'} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "{'ansible.builtin.import_playbook': 'foo.yml', 'import_playbook': 'other.yml'} should not be valid under {'required': ['import_playbook']}" + }, + { + "path": "$[0]", + "message": "{'ansible.builtin.import_playbook': 'foo.yml', 'import_playbook': 'other.yml'} should not be valid under {'required': ['ansible.builtin.import_playbook']}" + }, + { + "path": "$[0]", + "message": "Additional properties are not allowed ('ansible.builtin.import_playbook', 'import_playbook' were unexpected)" + }, + { + "path": "$[0]", + "message": "{'ansible.builtin.import_playbook': 'foo.yml', 'import_playbook': 'other.yml'} should not be valid under {'required': ['ansible.builtin.import_playbook']}" + }, + { + "path": "$[0]", + "message": "{'ansible.builtin.import_playbook': 'foo.yml', 'import_playbook': 'other.yml'} should not be valid under {'required': ['import_playbook']}" + }, + { + "path": "$[0]", + "message": "'hosts' is a required property" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/invalid-failed-when.yml b/test/schemas/negative_test/playbooks/invalid-failed-when.yml new file mode 100644 index 0000000..075f166 --- /dev/null +++ b/test/schemas/negative_test/playbooks/invalid-failed-when.yml @@ -0,0 +1,15 @@ +- hosts: localhost + tasks: + - debug: + msg: "failed_when should not accept numeric" + failed_when: 123 + + - debug: + msg: "failed_when should not accept sequence" + failed_when: + - foo + - bar + + - debug: + msg: "failed_when should not accept map" + failed_when: {} diff --git 
a/test/schemas/negative_test/playbooks/invalid-failed-when.yml.md b/test/schemas/negative_test/playbooks/invalid-failed-when.yml.md new file mode 100644 index 0000000..3a41059 --- /dev/null +++ b/test/schemas/negative_test/playbooks/invalid-failed-when.yml.md @@ -0,0 +1,253 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/tasks/0/failed_when", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/0/type" + }, + { + "instancePath": "/0/tasks/0/failed_when", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/1/type" + }, + { + "instancePath": "/0/tasks/0/failed_when", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/2/type" + }, + { + "instancePath": "/0/tasks/0/failed_when", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/$defs/complex_conditional/oneOf" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + }, + { + "instancePath": "/0/tasks/2", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/tasks/2/failed_when", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/0/type" + }, + { + "instancePath": "/0/tasks/2/failed_when", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/1/type" + }, + { + "instancePath": "/0/tasks/2/failed_when", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/2/type" + }, + { + "instancePath": "/0/tasks/2/failed_when", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": 
"#/$defs/complex_conditional/oneOf" + }, + { + "instancePath": "/0/tasks/2", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/invalid-failed-when.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'debug': {'msg': 'failed_when should not accept numeric'}, 'failed_when': 123}, {'debug': {'msg': 'failed_when should not accept sequence'}, 'failed_when': ['foo', 'bar']}, {'debug': {'msg': 'failed_when should not accept map'}, 'failed_when': {}}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'debug': {'msg': 'failed_when should not accept numeric'}, 'failed_when': 123}, {'debug': {'msg': 'failed_when should not accept sequence'}, 'failed_when': ['foo', 'bar']}, {'debug': {'msg': 'failed_when should not accept map'}, 'failed_when': {}}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].tasks[0]", + "message": "{'debug': {'msg': 'failed_when should not accept numeric'}, 'failed_when': 123} is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].tasks[0].failed_when", + "message": "123 is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].failed_when", + "message": "123 is not of type 'boolean'" + }, + { + "path": "$[0].tasks[0].failed_when", + "message": "123 is not of type 'string'" + }, + { + "path": "$[0].tasks[0].failed_when", + "message": "123 is not of type 'array'" + }, + { + "path": "$[0].tasks[2]", + "message": "{'debug': {'msg': 'failed_when should not accept map'}, 'failed_when': {}} is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[2]", + "message": "'block' is a required property" + }, + { + "path": "$[0].tasks[2].failed_when", + "message": "{} is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[2].failed_when", + "message": "{} is not of type 'boolean'" + }, + { + "path": "$[0].tasks[2].failed_when", + "message": "{} is not of type 'string'" + }, + { + "path": "$[0].tasks[2].failed_when", + "message": "{} is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/invalid-serial.yml b/test/schemas/negative_test/playbooks/invalid-serial.yml new file mode 100644 index 0000000..f2ffd3c --- /dev/null +++ b/test/schemas/negative_test/playbooks/invalid-serial.yml @@ -0,0 +1,2 @@ +- hosts: localhost + serial: 10%BAD diff --git a/test/schemas/negative_test/playbooks/invalid-serial.yml.md 
b/test/schemas/negative_test/playbooks/invalid-serial.yml.md new file mode 100644 index 0000000..5c48b21 --- /dev/null +++ b/test/schemas/negative_test/playbooks/invalid-serial.yml.md @@ -0,0 +1,177 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "serial" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/serial", + "keyword": "type", + "message": "must be integer", + "params": { + "type": "integer" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/serial", + "keyword": "pattern", + "message": "must match pattern \"^\\d+\\.?\\d*%?$\"", + "params": { + "pattern": "^\\d+\\.?\\d*%?$" + }, + "schemaPath": "#/oneOf/1/pattern" + }, + { + "instancePath": "/0/serial", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/serial", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/serial", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/properties/serial/anyOf/1/type" + }, + { + "instancePath": "/0/serial", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/properties/serial/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/invalid-serial.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'serial': '10%BAD'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts', 'serial' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts', 'serial' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'serial': '10%BAD'} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": 
"$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].serial", + "message": "'10%BAD' is not valid under any of the given schemas" + }, + { + "path": "$[0].serial", + "message": "'10%BAD' is not valid under any of the given schemas" + }, + { + "path": "$[0].serial", + "message": "'10%BAD' is not of type 'integer'" + }, + { + "path": "$[0].serial", + "message": "'10%BAD' does not match '^\\\\d+\\\\.?\\\\d*%?$'" + }, + { + "path": "$[0].serial", + "message": "'10%BAD' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + }, + { + "path": "$[0].serial", + "message": "'10%BAD' is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/invalid.yml b/test/schemas/negative_test/playbooks/invalid.yml new file mode 100644 index 0000000..e34d3c9 --- /dev/null +++ b/test/schemas/negative_test/playbooks/invalid.yml @@ -0,0 +1,3 @@ +- name: foo + hosts: localhost # <-- not allowed with import_playbook + import_playbook: included.yml diff --git a/test/schemas/negative_test/playbooks/invalid.yml.md b/test/schemas/negative_test/playbooks/invalid.yml.md new file mode 100644 index 0000000..c3435dd --- /dev/null +++ b/test/schemas/negative_test/playbooks/invalid.yml.md @@ -0,0 +1,77 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "not", + "message": "must NOT be valid", + "params": {}, + "schemaPath": "#/allOf/1/not" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "import_playbook" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/invalid.yml", + "path": "$[0]", + "message": "{'name': 'foo', 'hosts': 'localhost', 'import_playbook': 'included.yml'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "{'name': 'foo', 'hosts': 'localhost', 'import_playbook': 'included.yml'} should not be valid under {'required': ['import_playbook']}" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts' does not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "Additional properties are not allowed ('import_playbook' was unexpected)" + }, + { + "path": "$[0]", + "message": "{'name': 'foo', 'hosts': 'localhost', 'import_playbook': 'included.yml'} should not be valid under {'required': ['import_playbook']}" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/invalid_become.yml b/test/schemas/negative_test/playbooks/invalid_become.yml new file mode 100644 index 0000000..0cc6721 --- /dev/null +++ b/test/schemas/negative_test/playbooks/invalid_become.yml @@ -0,0 +1,3 @@ +--- +- hosts: localhost + become: yes # <- invalid based on json schema diff --git a/test/schemas/negative_test/playbooks/invalid_become.yml.md 
b/test/schemas/negative_test/playbooks/invalid_become.yml.md new file mode 100644 index 0000000..37d730d --- /dev/null +++ b/test/schemas/negative_test/playbooks/invalid_become.yml.md @@ -0,0 +1,140 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "become" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/become", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/become", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/become", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/invalid_become.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'become': 'yes'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'become', 'hosts' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'become', 'hosts' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'become': 'yes'} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].become", + "message": "'yes' is not valid under any of the given schemas" + }, + { + "path": "$[0].become", + "message": "'yes' is not of type 'boolean'" + }, + { + "path": "$[0].become", + "message": "'yes' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/local_action.yml b/test/schemas/negative_test/playbooks/local_action.yml new file mode 100644 index 
0000000..9e01b1d --- /dev/null +++ b/test/schemas/negative_test/playbooks/local_action.yml @@ -0,0 +1,3 @@ +- hosts: localhost + tasks: + - local_action: [] # <-- only string or dict is allowed diff --git a/test/schemas/negative_test/playbooks/local_action.yml.md b/test/schemas/negative_test/playbooks/local_action.yml.md new file mode 100644 index 0000000..17f6244 --- /dev/null +++ b/test/schemas/negative_test/playbooks/local_action.yml.md @@ -0,0 +1,141 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/tasks/0/local_action", + "keyword": "type", + "message": "must be string,object", + "params": { + "type": [ + "string", + "object" + ] + }, + "schemaPath": "#/properties/local_action/type" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/local_action.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'local_action': []}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'local_action': []}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].tasks[0]", + "message": "{'local_action': []} is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0]", + "message": "'block' is a required property" + }, + { + "path": 
"$[0].tasks[0].local_action", + "message": "[] is not of type 'string', 'object'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/loop.yml b/test/schemas/negative_test/playbooks/loop.yml new file mode 100644 index 0000000..fd02ec5 --- /dev/null +++ b/test/schemas/negative_test/playbooks/loop.yml @@ -0,0 +1,7 @@ +--- +- hosts: localhost + tasks: + - name: that should pass + ansible.builtin.debug: + var: item + loop: 123 # <-- number is not valid diff --git a/test/schemas/negative_test/playbooks/loop.yml.md b/test/schemas/negative_test/playbooks/loop.yml.md new file mode 100644 index 0000000..88df838 --- /dev/null +++ b/test/schemas/negative_test/playbooks/loop.yml.md @@ -0,0 +1,141 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/tasks/0/loop", + "keyword": "type", + "message": "must be string,array", + "params": { + "type": [ + "string", + "array" + ] + }, + "schemaPath": "#/properties/loop/type" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/loop.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'name': 'that should pass', 'ansible.builtin.debug': {'var': 'item'}, 'loop': 123}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'name': 'that should pass', 'ansible.builtin.debug': {'var': 'item'}, 'loop': 123}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + 
"message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].tasks[0]", + "message": "{'name': 'that should pass', 'ansible.builtin.debug': {'var': 'item'}, 'loop': 123} is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].tasks[0].loop", + "message": "123 is not of type 'string', 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/loop2.yml b/test/schemas/negative_test/playbooks/loop2.yml new file mode 100644 index 0000000..7c9f2db --- /dev/null +++ b/test/schemas/negative_test/playbooks/loop2.yml @@ -0,0 +1,7 @@ +--- +- hosts: localhost + tasks: + - name: that should pass + ansible.builtin.debug: + var: item + loop: {} # <-- map is not valid diff --git a/test/schemas/negative_test/playbooks/loop2.yml.md b/test/schemas/negative_test/playbooks/loop2.yml.md new file mode 100644 index 0000000..df60a41 --- /dev/null +++ b/test/schemas/negative_test/playbooks/loop2.yml.md @@ -0,0 +1,141 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/tasks/0/loop", + "keyword": "type", + "message": "must be string,array", + "params": { + "type": [ + "string", + "array" + ] + }, + "schemaPath": "#/properties/loop/type" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/loop2.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'name': 'that should pass', 'ansible.builtin.debug': {'var': 'item'}, 'loop': {}}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, 
+ "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'name': 'that should pass', 'ansible.builtin.debug': {'var': 'item'}, 'loop': {}}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].tasks[0]", + "message": "{'name': 'that should pass', 'ansible.builtin.debug': {'var': 'item'}, 'loop': {}} is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].tasks[0].loop", + "message": "{} is not of type 'string', 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/no_log_partial_template.yml b/test/schemas/negative_test/playbooks/no_log_partial_template.yml new file mode 100644 index 0000000..224aba8 --- /dev/null +++ b/test/schemas/negative_test/playbooks/no_log_partial_template.yml @@ -0,0 +1,7 @@ +- hosts: localhost + vars: + some_var: true + tasks: + - ansible.builtin.debug: + msg: foo + no_log: "foo-{{ some_var }}" # <-- partial templating not allowed here diff --git a/test/schemas/negative_test/playbooks/no_log_partial_template.yml.md b/test/schemas/negative_test/playbooks/no_log_partial_template.yml.md new file mode 100644 index 0000000..ee73686 --- /dev/null +++ b/test/schemas/negative_test/playbooks/no_log_partial_template.yml.md @@ -0,0 +1,203 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/tasks/0/no_log", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/tasks/0/no_log", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/tasks/0/no_log", + "keyword": "oneOf", + "message": "must match exactly one schema 
in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/tasks/0/no_log", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/tasks/0/no_log", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/tasks/0/no_log", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/no_log_partial_template.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'vars': {'some_var': True}, 'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}, 'no_log': 'foo-{{ some_var }}'}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'vars': {'some_var': True}, 'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}, 'no_log': 'foo-{{ some_var }}'}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].tasks[0]", + "message": "{'ansible.builtin.debug': {'msg': 'foo'}, 'no_log': 'foo-{{ some_var }}'} is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].no_log", + "message": "'foo-{{ some_var }}' is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].no_log", + "message": "'foo-{{ some_var }}' is not of type 'boolean'" + }, + { + "path": "$[0].tasks[0].no_log", + "message": "'foo-{{ some_var }}' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + }, + { + "path": "$[0].tasks[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].tasks[0].no_log", + "message": "'foo-{{ some_var }}' is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].no_log", + "message": "'foo-{{ some_var }}' is not of type 'boolean'" + }, + { + "path": "$[0].tasks[0].no_log", + "message": "'foo-{{ some_var }}' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/no_log_string.yml b/test/schemas/negative_test/playbooks/no_log_string.yml new file mode 100644 index 0000000..caf88e2 --- /dev/null +++ b/test/schemas/negative_test/playbooks/no_log_string.yml @@ -0,0 +1,7 @@ +- hosts: localhost + vars: + some_var: 
true + tasks: + - ansible.builtin.debug: + msg: foo + no_log: some_var # <-- bad, jinja use must be explicit diff --git a/test/schemas/negative_test/playbooks/no_log_string.yml.md b/test/schemas/negative_test/playbooks/no_log_string.yml.md new file mode 100644 index 0000000..c8213c0 --- /dev/null +++ b/test/schemas/negative_test/playbooks/no_log_string.yml.md @@ -0,0 +1,203 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/tasks/0/no_log", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/tasks/0/no_log", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/tasks/0/no_log", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/tasks/0/no_log", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/tasks/0/no_log", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/tasks/0/no_log", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/no_log_string.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'vars': {'some_var': True}, 'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}, 'no_log': 
'some_var'}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'vars': {'some_var': True}, 'tasks': [{'ansible.builtin.debug': {'msg': 'foo'}, 'no_log': 'some_var'}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].tasks[0]", + "message": "{'ansible.builtin.debug': {'msg': 'foo'}, 'no_log': 'some_var'} is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].no_log", + "message": "'some_var' is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].no_log", + "message": "'some_var' is not of type 'boolean'" + }, + { + "path": "$[0].tasks[0].no_log", + "message": "'some_var' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + }, + { + "path": "$[0].tasks[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].tasks[0].no_log", + "message": "'some_var' is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].no_log", + "message": "'some_var' is not of type 'boolean'" + }, + { + "path": "$[0].tasks[0].no_log", + "message": "'some_var' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/roles.yml b/test/schemas/negative_test/playbooks/roles.yml new file mode 100644 index 0000000..e24445a --- /dev/null +++ b/test/schemas/negative_test/playbooks/roles.yml @@ -0,0 +1,2 @@ +- hosts: localhost + roles: xxx # must be array diff --git a/test/schemas/negative_test/playbooks/roles.yml.md b/test/schemas/negative_test/playbooks/roles.yml.md new file mode 100644 index 0000000..9b4e25a --- /dev/null +++ b/test/schemas/negative_test/playbooks/roles.yml.md @@ -0,0 +1,114 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "roles" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/roles", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" 
+ }, + "schemaPath": "#/properties/roles/type" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/roles.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'roles': 'xxx'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts', 'roles' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts', 'roles' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'roles': 'xxx'} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].roles", + "message": "'xxx' is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/run_once_list.yml b/test/schemas/negative_test/playbooks/run_once_list.yml new file mode 100644 index 0000000..0dd2cd5 --- /dev/null +++ b/test/schemas/negative_test/playbooks/run_once_list.yml @@ -0,0 +1,8 @@ +- hosts: localhost + tasks: + - name: foo2 + ansible.builtin.debug: + msg: foo! + run_once: # invalid due to schema, also ansible does not allow lists + - "{{ true }}" + - xxx diff --git a/test/schemas/negative_test/playbooks/run_once_list.yml.md b/test/schemas/negative_test/playbooks/run_once_list.yml.md new file mode 100644 index 0000000..84b7dc1 --- /dev/null +++ b/test/schemas/negative_test/playbooks/run_once_list.yml.md @@ -0,0 +1,221 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/tasks/0/run_once", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/tasks/0/run_once", + 
"keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/oneOf/1/type" + }, + { + "instancePath": "/0/tasks/0/run_once", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/full-jinja/type" + }, + { + "instancePath": "/0/tasks/0/run_once", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/tasks/0/run_once", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/tasks/0/run_once", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/oneOf/1/type" + }, + { + "instancePath": "/0/tasks/0/run_once", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/full-jinja/type" + }, + { + "instancePath": "/0/tasks/0/run_once", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/tasks/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/run_once_list.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'name': 'foo2', 'ansible.builtin.debug': {'msg': 'foo!'}, 'run_once': ['{{ true }}', 'xxx']}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tasks': [{'name': 'foo2', 'ansible.builtin.debug': {'msg': 'foo!'}, 'run_once': ['{{ true }}', 'xxx']}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].tasks[0]", + "message": "{'name': 'foo2', 'ansible.builtin.debug': {'msg': 'foo!'}, 'run_once': ['{{ true }}', 'xxx']} is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].run_once", + "message": "['{{ true }}', 'xxx'] is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].run_once", + "message": "['{{ true }}', 'xxx'] is not of type 'boolean'" + }, + { + "path": "$[0].tasks[0].run_once", + "message": "['{{ true }}', 'xxx'] is not of type 'string'" + }, + { + "path": "$[0].tasks[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].tasks[0].run_once", + "message": "['{{ true }}', 'xxx'] is not valid under any of the given schemas" + }, + { + "path": "$[0].tasks[0].run_once", + "message": "['{{ true }}', 'xxx'] is not of 
type 'boolean'" + }, + { + "path": "$[0].tasks[0].run_once", + "message": "['{{ true }}', 'xxx'] is not of type 'string'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tags-mapping.yml b/test/schemas/negative_test/playbooks/tags-mapping.yml new file mode 100644 index 0000000..8c6da3d --- /dev/null +++ b/test/schemas/negative_test/playbooks/tags-mapping.yml @@ -0,0 +1,2 @@ +- hosts: localhost + tags: {} # <-- not allowed diff --git a/test/schemas/negative_test/playbooks/tags-mapping.yml.md b/test/schemas/negative_test/playbooks/tags-mapping.yml.md new file mode 100644 index 0000000..aada0c6 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tags-mapping.yml.md @@ -0,0 +1,166 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/tags/anyOf/0/type" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/tags/anyOf/1/type" + }, + { + "instancePath": "/0/tags", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/$defs/tags/anyOf" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/tags/anyOf/0/type" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/tags/anyOf/1/type" + }, + { + "instancePath": "/0/tags", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/$defs/tags/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tags-mapping.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tags': {}} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts' does not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts' does not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tags': {}} is not valid under any 
of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].tags", + "message": "{} is not valid under any of the given schemas" + }, + { + "path": "$[0].tags", + "message": "{} is not of type 'string'" + }, + { + "path": "$[0].tags", + "message": "{} is not of type 'array'" + }, + { + "path": "$[0].tags", + "message": "{} is not valid under any of the given schemas" + }, + { + "path": "$[0].tags", + "message": "{} is not of type 'string'" + }, + { + "path": "$[0].tags", + "message": "{} is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tags-number.yml b/test/schemas/negative_test/playbooks/tags-number.yml new file mode 100644 index 0000000..1872ced --- /dev/null +++ b/test/schemas/negative_test/playbooks/tags-number.yml @@ -0,0 +1,2 @@ +- hosts: localhost + tags: 123 # <-- not allowed diff --git a/test/schemas/negative_test/playbooks/tags-number.yml.md b/test/schemas/negative_test/playbooks/tags-number.yml.md new file mode 100644 index 0000000..3d32737 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tags-number.yml.md @@ -0,0 +1,166 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/tags/anyOf/0/type" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/tags/anyOf/1/type" + }, + { + "instancePath": "/0/tags", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/$defs/tags/anyOf" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/tags/anyOf/0/type" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/tags/anyOf/1/type" + }, + { + "instancePath": "/0/tags", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/$defs/tags/anyOf" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tags-number.yml", + "path": "$[0]", + "message": 
"{'hosts': 'localhost', 'tags': 123} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts' does not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts' does not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'tags': 123} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].tags", + "message": "123 is not valid under any of the given schemas" + }, + { + "path": "$[0].tags", + "message": "123 is not of type 'string'" + }, + { + "path": "$[0].tags", + "message": "123 is not of type 'array'" + }, + { + "path": "$[0].tags", + "message": "123 is not valid under any of the given schemas" + }, + { + "path": "$[0].tags", + "message": "123 is not of type 'string'" + }, + { + "path": "$[0].tags", + "message": "123 is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks.yml b/test/schemas/negative_test/playbooks/tasks.yml new file mode 100644 index 0000000..2464a73 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks.yml @@ -0,0 +1,5 @@ +- hosts: localhost + pre_tasks: foo # <-- must be array + post_tasks: {} # <-- must be array + tasks: 1 # <-- must be array + handlers: 1.0 # <-- must be array diff --git a/test/schemas/negative_test/playbooks/tasks.yml.md b/test/schemas/negative_test/playbooks/tasks.yml.md new file mode 100644 index 0000000..309912b --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks.yml.md @@ -0,0 +1,192 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "pre_tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "post_tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tasks" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + 
"message": "must NOT have additional properties", + "params": { + "additionalProperty": "handlers" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/handlers", + "keyword": "type", + "message": "must be array,null", + "params": { + "type": [ + "array", + "null" + ] + }, + "schemaPath": "#/type" + }, + { + "instancePath": "/0/post_tasks", + "keyword": "type", + "message": "must be array,null", + "params": { + "type": [ + "array", + "null" + ] + }, + "schemaPath": "#/type" + }, + { + "instancePath": "/0/pre_tasks", + "keyword": "type", + "message": "must be array,null", + "params": { + "type": [ + "array", + "null" + ] + }, + "schemaPath": "#/type" + }, + { + "instancePath": "/0/tasks", + "keyword": "type", + "message": "must be array,null", + "params": { + "type": [ + "array", + "null" + ] + }, + "schemaPath": "#/type" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'pre_tasks': 'foo', 'post_tasks': {}, 'tasks': 1, 'handlers': 1.0} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'handlers', 'hosts', 'post_tasks', 'pre_tasks', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'handlers', 'hosts', 'post_tasks', 'pre_tasks', 'tasks' do not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'pre_tasks': 'foo', 'post_tasks': {}, 'tasks': 1, 'handlers': 1.0} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].handlers", + "message": "1.0 is not of type 'array', 'null'" + }, + { + "path": "$[0].post_tasks", + "message": "{} is not of type 'array', 'null'" + }, + { + "path": "$[0].pre_tasks", + "message": "'foo' is not of type 'array', 'null'" + }, + { + "path": "$[0].tasks", + "message": "1 is not of type 'array', 'null'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/args_integer.yml b/test/schemas/negative_test/playbooks/tasks/args_integer.yml new file mode 100644 index 0000000..b831039 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/args_integer.yml @@ -0,0 +1,2 @@ +- action: foo + args: 123 # invalid diff --git a/test/schemas/negative_test/playbooks/tasks/args_integer.yml.md b/test/schemas/negative_test/playbooks/tasks/args_integer.yml.md new file mode 100644 index 0000000..8820251 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/args_integer.yml.md @@ -0,0 +1,99 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/args", + "keyword": "type", + "message": "must be object", + "params": { + "type": "object" + }, + "schemaPath": "#/oneOf/0/type" + }, + 
{ + "instancePath": "/0/args", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/oneOf/1/type" + }, + { + "instancePath": "/0/args", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/full-jinja/type" + }, + { + "instancePath": "/0/args", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/args_integer.yml", + "path": "$[0]", + "message": "{'action': 'foo', 'args': 123} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].args", + "message": "123 is not valid under any of the given schemas" + }, + { + "path": "$[0].args", + "message": "123 is not of type 'object'" + }, + { + "path": "$[0].args", + "message": "123 is not of type 'string'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/args_string.yml b/test/schemas/negative_test/playbooks/tasks/args_string.yml new file mode 100644 index 0000000..121da6d --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/args_string.yml @@ -0,0 +1,2 @@ +- action: foo + args: "{{ }}123" # invalid as only full jinja2 expressions are allowed diff --git a/test/schemas/negative_test/playbooks/tasks/args_string.yml.md b/test/schemas/negative_test/playbooks/tasks/args_string.yml.md new file mode 100644 index 0000000..6359a14 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/args_string.yml.md @@ -0,0 +1,90 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/args", + "keyword": "type", + "message": "must be object", + "params": { + "type": "object" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/args", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/args", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/args_string.yml", + "path": "$[0]", + "message": "{'action': 'foo', 'args': '{{ }}123'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].args", + "message": "'{{ }}123' is not valid 
under any of the given schemas" + }, + { + "path": "$[0].args", + "message": "'{{ }}123' is not of type 'object'" + }, + { + "path": "$[0].args", + "message": "'{{ }}123' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/become_method_untemplated.yml b/test/schemas/negative_test/playbooks/tasks/become_method_untemplated.yml new file mode 100644 index 0000000..bc7217f --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/become_method_untemplated.yml @@ -0,0 +1,4 @@ +- command: echo 123 + vars: + sudo_var: doo + become_method: sudo_var # templating requires {{ }} diff --git a/test/schemas/negative_test/playbooks/tasks/become_method_untemplated.yml.md b/test/schemas/negative_test/playbooks/tasks/become_method_untemplated.yml.md new file mode 100644 index 0000000..25d3704 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/become_method_untemplated.yml.md @@ -0,0 +1,149 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/become_method", + "keyword": "enum", + "message": "must be equal to one of the allowed values", + "params": { + "allowedValues": [ + "sudo", + "su", + "pbrun", + "pfexec", + "runas", + "dzdo", + "ksu", + "doas", + "machinectl" + ] + }, + "schemaPath": "#/oneOf/0/enum" + }, + { + "instancePath": "/0/become_method", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/become_method", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/become_method", + "keyword": "enum", + "message": "must be equal to one of the allowed values", + "params": { + "allowedValues": [ + "sudo", + "su", + "pbrun", + "pfexec", + "runas", + "dzdo", + "ksu", + "doas", + "machinectl" + ] + }, + "schemaPath": "#/oneOf/0/enum" + }, + { + "instancePath": "/0/become_method", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/become_method", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/become_method_untemplated.yml", + "path": "$[0]", + "message": "{'command': 'echo 123', 'vars': {'sudo_var': 'doo'}, 'become_method': 'sudo_var'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0].become_method", + "message": "'sudo_var' is not valid under any of the given schemas" + }, + { + "path": "$[0].become_method", + "message": "'sudo_var' is not one of ['sudo', 'su', 'pbrun', 'pfexec', 
'runas', 'dzdo', 'ksu', 'doas', 'machinectl']" + }, + { + "path": "$[0].become_method", + "message": "'sudo_var' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + }, + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].become_method", + "message": "'sudo_var' is not valid under any of the given schemas" + }, + { + "path": "$[0].become_method", + "message": "'sudo_var' is not one of ['sudo', 'su', 'pbrun', 'pfexec', 'runas', 'dzdo', 'ksu', 'doas', 'machinectl']" + }, + { + "path": "$[0].become_method", + "message": "'sudo_var' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/ignore_errors.yml b/test/schemas/negative_test/playbooks/tasks/ignore_errors.yml new file mode 100644 index 0000000..4f8cbb3 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/ignore_errors.yml @@ -0,0 +1,4 @@ +- command: echo 123 + vars: + should_ignore_errors: true + ignore_errors: should_ignore_errors # invalid due to missing {{ }} diff --git a/test/schemas/negative_test/playbooks/tasks/ignore_errors.yml.md b/test/schemas/negative_test/playbooks/tasks/ignore_errors.yml.md new file mode 100644 index 0000000..559a200 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/ignore_errors.yml.md @@ -0,0 +1,129 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/ignore_errors", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/ignore_errors", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/ignore_errors", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/ignore_errors", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/ignore_errors", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/ignore_errors", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/ignore_errors.yml", + "path": "$[0]", + "message": "{'command': 'echo 123', 'vars': {'should_ignore_errors': True}, 'ignore_errors': 'should_ignore_errors'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0].ignore_errors", + "message": "'should_ignore_errors' is not valid under any of 
the given schemas" + }, + { + "path": "$[0].ignore_errors", + "message": "'should_ignore_errors' is not of type 'boolean'" + }, + { + "path": "$[0].ignore_errors", + "message": "'should_ignore_errors' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + }, + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].ignore_errors", + "message": "'should_ignore_errors' is not valid under any of the given schemas" + }, + { + "path": "$[0].ignore_errors", + "message": "'should_ignore_errors' is not of type 'boolean'" + }, + { + "path": "$[0].ignore_errors", + "message": "'should_ignore_errors' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/invalid_block.yml b/test/schemas/negative_test/playbooks/tasks/invalid_block.yml new file mode 100644 index 0000000..6fef6d1 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/invalid_block.yml @@ -0,0 +1,2 @@ +--- +- block: {} # <-- invalid, should be array diff --git a/test/schemas/negative_test/playbooks/tasks/invalid_block.yml.md b/test/schemas/negative_test/playbooks/tasks/invalid_block.yml.md new file mode 100644 index 0000000..bf4b30e --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/invalid_block.yml.md @@ -0,0 +1,62 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0/block", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/properties/block/type" + }, + { + "instancePath": "/0", + "keyword": "not", + "message": "must NOT be valid", + "params": {}, + "schemaPath": "#/allOf/3/not" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/invalid_block.yml", + "path": "$[0]", + "message": "{'block': {}} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "{'block': {}} should not be valid under {'required': ['block']}" + }, + "sub_errors": [ + { + "path": "$[0].block", + "message": "{} is not of type 'array'" + }, + { + "path": "$[0]", + "message": "{'block': {}} should not be valid under {'required': ['block']}" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/local_action.yml b/test/schemas/negative_test/playbooks/tasks/local_action.yml new file mode 100644 index 0000000..d601ff5 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/local_action.yml @@ -0,0 +1 @@ +- local_action: [] # <-- only string or dict is allowed diff --git a/test/schemas/negative_test/playbooks/tasks/local_action.yml.md b/test/schemas/negative_test/playbooks/tasks/local_action.yml.md new file mode 100644 index 0000000..cf67e7b --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/local_action.yml.md @@ -0,0 +1,67 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/local_action", + "keyword": "type", + "message": "must be string,object", + "params": { + "type": [ + "string", + "object" + ] + }, + "schemaPath": "#/properties/local_action/type" + }, + { + "instancePath": "/0", + 
"keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/local_action.yml", + "path": "$[0]", + "message": "{'local_action': []} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].local_action", + "message": "[] is not of type 'string', 'object'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/loop.yml b/test/schemas/negative_test/playbooks/tasks/loop.yml new file mode 100644 index 0000000..651d262 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/loop.yml @@ -0,0 +1,3 @@ +- ansible.builtin.debug: + var: item + loop: {} # <-- map is not valid diff --git a/test/schemas/negative_test/playbooks/tasks/loop.yml.md b/test/schemas/negative_test/playbooks/tasks/loop.yml.md new file mode 100644 index 0000000..de8277f --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/loop.yml.md @@ -0,0 +1,67 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/loop", + "keyword": "type", + "message": "must be string,array", + "params": { + "type": [ + "string", + "array" + ] + }, + "schemaPath": "#/properties/loop/type" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/loop.yml", + "path": "$[0]", + "message": "{'ansible.builtin.debug': {'var': 'item'}, 'loop': {}} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].loop", + "message": "{} is not of type 'string', 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/loop2.yml b/test/schemas/negative_test/playbooks/tasks/loop2.yml new file mode 100644 index 0000000..ec2642f --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/loop2.yml @@ -0,0 +1,3 @@ +- ansible.builtin.debug: + var: item + loop: 123 # <-- number is not valid diff --git a/test/schemas/negative_test/playbooks/tasks/loop2.yml.md b/test/schemas/negative_test/playbooks/tasks/loop2.yml.md new file mode 100644 index 0000000..c36d7c9 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/loop2.yml.md @@ -0,0 +1,67 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/loop", + "keyword": "type", + "message": "must be string,array", + "params": { + "type": [ + "string", + "array" + ] + }, + "schemaPath": "#/properties/loop/type" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in 
anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/loop2.yml", + "path": "$[0]", + "message": "{'ansible.builtin.debug': {'var': 'item'}, 'loop': 123} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].loop", + "message": "123 is not of type 'string', 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/no_log_number.yml b/test/schemas/negative_test/playbooks/tasks/no_log_number.yml new file mode 100644 index 0000000..4fa8da2 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/no_log_number.yml @@ -0,0 +1,3 @@ +- ansible.builtin.debug: + msg: foo + no_log: 123 # <-- bad diff --git a/test/schemas/negative_test/playbooks/tasks/no_log_number.yml.md b/test/schemas/negative_test/playbooks/tasks/no_log_number.yml.md new file mode 100644 index 0000000..4b9516c --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/no_log_number.yml.md @@ -0,0 +1,147 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/no_log", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/no_log", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/oneOf/1/type" + }, + { + "instancePath": "/0/no_log", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/full-jinja/type" + }, + { + "instancePath": "/0/no_log", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/no_log", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/no_log", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/oneOf/1/type" + }, + { + "instancePath": "/0/no_log", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/full-jinja/type" + }, + { + "instancePath": "/0/no_log", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/no_log_number.yml", + "path": "$[0]", + "message": "{'ansible.builtin.debug': {'msg': 'foo'}, 'no_log': 123} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0].no_log", + "message": "123 is not valid under any of the given schemas" + 
}, + { + "path": "$[0].no_log", + "message": "123 is not of type 'boolean'" + }, + { + "path": "$[0].no_log", + "message": "123 is not of type 'string'" + }, + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].no_log", + "message": "123 is not valid under any of the given schemas" + }, + { + "path": "$[0].no_log", + "message": "123 is not of type 'boolean'" + }, + { + "path": "$[0].no_log", + "message": "123 is not of type 'string'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/no_log_string.yml b/test/schemas/negative_test/playbooks/tasks/no_log_string.yml new file mode 100644 index 0000000..0e0b71a --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/no_log_string.yml @@ -0,0 +1,5 @@ +- ansible.builtin.debug: + msg: foo + vars: + some_var: true + no_log: some_var # <-- bad, jinja use must be explicit diff --git a/test/schemas/negative_test/playbooks/tasks/no_log_string.yml.md b/test/schemas/negative_test/playbooks/tasks/no_log_string.yml.md new file mode 100644 index 0000000..6742175 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/no_log_string.yml.md @@ -0,0 +1,129 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/no_log", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/no_log", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/no_log", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0/no_log", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/oneOf/0/type" + }, + { + "instancePath": "/0/no_log", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/no_log", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/no_log_string.yml", + "path": "$[0]", + "message": "{'ansible.builtin.debug': {'msg': 'foo'}, 'vars': {'some_var': True}, 'no_log': 'some_var'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0].no_log", + "message": "'some_var' is not valid under any of the given schemas" + }, + { + "path": "$[0].no_log", + "message": "'some_var' is not of type 'boolean'" + }, + { + "path": "$[0].no_log", + "message": "'some_var' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + }, + { + "path": 
"$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].no_log", + "message": "'some_var' is not valid under any of the given schemas" + }, + { + "path": "$[0].no_log", + "message": "'some_var' is not of type 'boolean'" + }, + { + "path": "$[0].no_log", + "message": "'some_var' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/tags-mapping.yml b/test/schemas/negative_test/playbooks/tasks/tags-mapping.yml new file mode 100644 index 0000000..39fe8c7 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/tags-mapping.yml @@ -0,0 +1,3 @@ +- ansible.builtin.debug: + msg: foo + tags: {} # <-- not allowed diff --git a/test/schemas/negative_test/playbooks/tasks/tags-mapping.yml.md b/test/schemas/negative_test/playbooks/tasks/tags-mapping.yml.md new file mode 100644 index 0000000..d860605 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/tags-mapping.yml.md @@ -0,0 +1,125 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/tags/anyOf/0/type" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/tags/anyOf/1/type" + }, + { + "instancePath": "/0/tags", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/$defs/tags/anyOf" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/tags/anyOf/0/type" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/tags/anyOf/1/type" + }, + { + "instancePath": "/0/tags", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/$defs/tags/anyOf" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/tags-mapping.yml", + "path": "$[0]", + "message": "{'ansible.builtin.debug': {'msg': 'foo'}, 'tags': {}} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0].tags", + "message": "{} is not valid under any of the given schemas" + }, + { + "path": "$[0].tags", + "message": "{} is not of type 'string'" + }, + { + "path": "$[0].tags", + "message": "{} is not of type 'array'" + }, + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].tags", + "message": "{} is not valid under any of the given schemas" + }, + { + "path": "$[0].tags", + "message": "{} is not of type 'string'" + }, + { + "path": "$[0].tags", + "message": "{} is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/tags-string.yml b/test/schemas/negative_test/playbooks/tasks/tags-string.yml new file 
mode 100644 index 0000000..6512fb5 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/tags-string.yml @@ -0,0 +1,3 @@ +- ansible.builtin.debug: + msg: foo + tags: 123 # <-- not allowed diff --git a/test/schemas/negative_test/playbooks/tasks/tags-string.yml.md b/test/schemas/negative_test/playbooks/tasks/tags-string.yml.md new file mode 100644 index 0000000..0bb7ed0 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/tags-string.yml.md @@ -0,0 +1,125 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/tags/anyOf/0/type" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/tags/anyOf/1/type" + }, + { + "instancePath": "/0/tags", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/$defs/tags/anyOf" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/tags/anyOf/0/type" + }, + { + "instancePath": "/0/tags", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/tags/anyOf/1/type" + }, + { + "instancePath": "/0/tags", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/$defs/tags/anyOf" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/tags-string.yml", + "path": "$[0]", + "message": "{'ansible.builtin.debug': {'msg': 'foo'}, 'tags': 123} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0].tags", + "message": "123 is not valid under any of the given schemas" + }, + { + "path": "$[0].tags", + "message": "123 is not of type 'string'" + }, + { + "path": "$[0].tags", + "message": "123 is not of type 'array'" + }, + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].tags", + "message": "123 is not valid under any of the given schemas" + }, + { + "path": "$[0].tags", + "message": "123 is not of type 'string'" + }, + { + "path": "$[0].tags", + "message": "123 is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/when_integer.yml b/test/schemas/negative_test/playbooks/tasks/when_integer.yml new file mode 100644 index 0000000..7758503 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/when_integer.yml @@ -0,0 +1,2 @@ +- action: foo + when: 123 # invalid, number is not accepted diff --git a/test/schemas/negative_test/playbooks/tasks/when_integer.yml.md b/test/schemas/negative_test/playbooks/tasks/when_integer.yml.md new file mode 100644 index 0000000..bc59cc4 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/when_integer.yml.md @@ -0,0 +1,155 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": 
"required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/when", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/0/type" + }, + { + "instancePath": "/0/when", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/1/type" + }, + { + "instancePath": "/0/when", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/2/type" + }, + { + "instancePath": "/0/when", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/$defs/complex_conditional/oneOf" + }, + { + "instancePath": "/0/when", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/0/type" + }, + { + "instancePath": "/0/when", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/1/type" + }, + { + "instancePath": "/0/when", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/2/type" + }, + { + "instancePath": "/0/when", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/$defs/complex_conditional/oneOf" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/when_integer.yml", + "path": "$[0]", + "message": "{'action': 'foo', 'when': 123} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0].when", + "message": "123 is not valid under any of the given schemas" + }, + { + "path": "$[0].when", + "message": "123 is not of type 'boolean'" + }, + { + "path": "$[0].when", + "message": "123 is not of type 'string'" + }, + { + "path": "$[0].when", + "message": "123 is not of type 'array'" + }, + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].when", + "message": "123 is not valid under any of the given schemas" + }, + { + "path": "$[0].when", + "message": "123 is not of type 'boolean'" + }, + { + "path": "$[0].when", + "message": "123 is not of type 'string'" + }, + { + "path": "$[0].when", + "message": "123 is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/when_object.yml b/test/schemas/negative_test/playbooks/tasks/when_object.yml new file mode 100644 index 0000000..430605d --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/when_object.yml @@ -0,0 +1,2 @@ +- action: foo + when: {} # invalid, object is not accepted diff --git a/test/schemas/negative_test/playbooks/tasks/when_object.yml.md b/test/schemas/negative_test/playbooks/tasks/when_object.yml.md new file mode 100644 index 0000000..6c28d0c --- /dev/null +++ 
b/test/schemas/negative_test/playbooks/tasks/when_object.yml.md @@ -0,0 +1,155 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/when", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/0/type" + }, + { + "instancePath": "/0/when", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/1/type" + }, + { + "instancePath": "/0/when", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/2/type" + }, + { + "instancePath": "/0/when", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/$defs/complex_conditional/oneOf" + }, + { + "instancePath": "/0/when", + "keyword": "type", + "message": "must be boolean", + "params": { + "type": "boolean" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/0/type" + }, + { + "instancePath": "/0/when", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/1/type" + }, + { + "instancePath": "/0/when", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/$defs/complex_conditional/oneOf/2/type" + }, + { + "instancePath": "/0/when", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/$defs/complex_conditional/oneOf" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/when_object.yml", + "path": "$[0]", + "message": "{'action': 'foo', 'when': {}} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0].when", + "message": "{} is not valid under any of the given schemas" + }, + { + "path": "$[0].when", + "message": "{} is not of type 'boolean'" + }, + { + "path": "$[0].when", + "message": "{} is not of type 'string'" + }, + { + "path": "$[0].when", + "message": "{} is not of type 'array'" + }, + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].when", + "message": "{} is not valid under any of the given schemas" + }, + { + "path": "$[0].when", + "message": "{} is not of type 'boolean'" + }, + { + "path": "$[0].when", + "message": "{} is not of type 'string'" + }, + { + "path": "$[0].when", + "message": "{} is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/with_items_boolean.yml b/test/schemas/negative_test/playbooks/tasks/with_items_boolean.yml new file mode 100644 index 0000000..eff6ea0 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/with_items_boolean.yml @@ -0,0 +1,2 @@ +- command: echo 123 + with_items: true # invalid, must be a list or templated string diff --git 
a/test/schemas/negative_test/playbooks/tasks/with_items_boolean.yml.md b/test/schemas/negative_test/playbooks/tasks/with_items_boolean.yml.md new file mode 100644 index 0000000..ffc8ef8 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/with_items_boolean.yml.md @@ -0,0 +1,88 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/with_items", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/$defs/full-jinja/type" + }, + { + "instancePath": "/0/with_items", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/properties/with_items/anyOf/1/type" + }, + { + "instancePath": "/0/with_items", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/properties/with_items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/with_items_boolean.yml", + "path": "$[0]", + "message": "{'command': 'echo 123', 'with_items': True} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].with_items", + "message": "True is not valid under any of the given schemas" + }, + { + "path": "$[0].with_items", + "message": "True is not of type 'string'" + }, + { + "path": "$[0].with_items", + "message": "True is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/tasks/with_items_untemplated_string.yml b/test/schemas/negative_test/playbooks/tasks/with_items_untemplated_string.yml new file mode 100644 index 0000000..257ffe2 --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/with_items_untemplated_string.yml @@ -0,0 +1,2 @@ +- command: echo 123 + with_items: foobar # invalid, probably user wanted "{{ foobar }}"? 
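
For contrast with the two `with_items` negative fixtures above, here is a minimal sketch (not part of the diff, values illustrative) of task entries the same schema branch would accept: per the `#/properties/with_items/anyOf` errors shown, `with_items` must be either a YAML list or a string that is entirely a Jinja2 expression matching the `full-jinja` pattern.

```yaml
# Hypothetical positive counterparts to with_items_boolean.yml and
# with_items_untemplated_string.yml; names and values are examples only.
- command: echo 123
  with_items: # valid: a literal YAML list
    - one
    - two
- command: echo 123
  with_items: "{{ foobar }}" # valid: the whole value is a Jinja2 expression
```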
diff --git a/test/schemas/negative_test/playbooks/tasks/with_items_untemplated_string.yml.md b/test/schemas/negative_test/playbooks/tasks/with_items_untemplated_string.yml.md new file mode 100644 index 0000000..158b0ee --- /dev/null +++ b/test/schemas/negative_test/playbooks/tasks/with_items_untemplated_string.yml.md @@ -0,0 +1,88 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'block'", + "params": { + "missingProperty": "block" + }, + "schemaPath": "#/required" + }, + { + "instancePath": "/0/with_items", + "keyword": "pattern", + "message": "must match pattern \"^\\{[\\{%](.|[\r\n])*[\\}%]\\}$\"", + "params": { + "pattern": "^\\{[\\{%](.|[\r\n])*[\\}%]\\}$" + }, + "schemaPath": "#/$defs/full-jinja/pattern" + }, + { + "instancePath": "/0/with_items", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/properties/with_items/anyOf/1/type" + }, + { + "instancePath": "/0/with_items", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/properties/with_items/anyOf" + }, + { + "instancePath": "/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/items/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/tasks/with_items_untemplated_string.yml", + "path": "$[0]", + "message": "{'command': 'echo 123', 'with_items': 'foobar'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'block' is a required property" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'block' is a required property" + }, + { + "path": "$[0].with_items", + "message": "'foobar' is not valid under any of the given schemas" + }, + { + "path": "$[0].with_items", + "message": "'foobar' does not match '^\\\\{[\\\\{%](.|[\\r\\n])*[\\\\}%]\\\\}$'" + }, + { + "path": "$[0].with_items", + "message": "'foobar' is not of type 'array'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/var_files_list_number.yml b/test/schemas/negative_test/playbooks/var_files_list_number.yml new file mode 100644 index 0000000..9f3d8dd --- /dev/null +++ b/test/schemas/negative_test/playbooks/var_files_list_number.yml @@ -0,0 +1,5 @@ +--- +- name: var_files should not accept array[number] + hosts: localhost + vars_files: + - 0 diff --git a/test/schemas/negative_test/playbooks/var_files_list_number.yml.md b/test/schemas/negative_test/playbooks/var_files_list_number.yml.md new file mode 100644 index 0000000..6ecc15e --- /dev/null +++ b/test/schemas/negative_test/playbooks/var_files_list_number.yml.md @@ -0,0 +1,118 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": 
"additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/vars_files", + "keyword": "type", + "message": "must be object", + "params": { + "type": "object" + }, + "schemaPath": "#/patternProperties/vars/type" + }, + { + "instancePath": "/0/vars_files/0", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/properties/vars_files/items/type" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/var_files_list_number.yml", + "path": "$[0]", + "message": "{'name': 'var_files should not accept array[number]', 'hosts': 'localhost', 'vars_files': [0]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts' does not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts' does not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'name': 'var_files should not accept array[number]', 'hosts': 'localhost', 'vars_files': [0]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].vars_files", + "message": "[0] is not of type 'object'" + }, + { + "path": "$[0].vars_files[0]", + "message": "0 is not of type 'string'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/var_files_number.yml b/test/schemas/negative_test/playbooks/var_files_number.yml new file mode 100644 index 0000000..fe26650 --- /dev/null +++ b/test/schemas/negative_test/playbooks/var_files_number.yml @@ -0,0 +1,4 @@ +--- +- name: var_files should not accept number + hosts: localhost + vars_files: 0 diff --git a/test/schemas/negative_test/playbooks/var_files_number.yml.md b/test/schemas/negative_test/playbooks/var_files_number.yml.md new file mode 100644 index 0000000..fa97e7e --- /dev/null +++ b/test/schemas/negative_test/playbooks/var_files_number.yml.md @@ -0,0 +1,122 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": 
"/0/vars_files", + "keyword": "type", + "message": "must be object", + "params": { + "type": "object" + }, + "schemaPath": "#/patternProperties/vars/type" + }, + { + "instancePath": "/0/vars_files", + "keyword": "type", + "message": "must be array,string,null", + "params": { + "type": [ + "array", + "string", + "null" + ] + }, + "schemaPath": "#/properties/vars_files/type" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/var_files_number.yml", + "path": "$[0]", + "message": "{'name': 'var_files should not accept number', 'hosts': 'localhost', 'vars_files': 0} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts' does not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts' does not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'name': 'var_files should not accept number', 'hosts': 'localhost', 'vars_files': 0} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].vars_files", + "message": "0 is not of type 'object'" + }, + { + "path": "$[0].vars_files", + "message": "0 is not of type 'array', 'string', 'null'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/vars/asterisk.yml b/test/schemas/negative_test/playbooks/vars/asterisk.yml new file mode 100644 index 0000000..9dd2200 --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/asterisk.yml @@ -0,0 +1,2 @@ +--- +"*foo": ... 
# invalid var name diff --git a/test/schemas/negative_test/playbooks/vars/asterisk.yml.md b/test/schemas/negative_test/playbooks/vars/asterisk.yml.md new file mode 100644 index 0000000..1ea9a98 --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/asterisk.yml.md @@ -0,0 +1,77 @@ +# ajv errors + +```json +[ + { + "instancePath": "", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "*foo" + }, + "schemaPath": "#/anyOf/0/additionalProperties" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/anyOf/1/type" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be null", + "params": { + "type": "null" + }, + "schemaPath": "#/anyOf/2/type" + }, + { + "instancePath": "", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/vars/asterisk.yml", + "path": "$", + "message": "{'*foo': '...'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$", + "message": "{'*foo': '...'} is not of type 'string'" + }, + "sub_errors": [ + { + "path": "$", + "message": "'*foo' does not match any of the regexes: '^(?!(False|None|True|and|any_errors_fatal|as|assert|async|await|become|become_exe|become_flags|become_method|become_user|break|check_mode|class|collections|connection|continue|debugger|def|del|diff|elif|else|environment|except|fact_path|finally|for|force_handlers|from|gather_facts|gather_subset|gather_timeout|global|handlers|hosts|if|ignore_errors|ignore_unreachable|import|in|is|lambda|max_fail_percentage|module_defaults|name|no_log|nonlocal|not|or|order|pass|port|post_tasks|pre_tasks|raise|remote_user|return|roles|run_once|serial|strategy|tags|tasks|throttle|timeout|try|vars|vars_files|vars_prompt|while|with|yield)$)[a-zA-Z_][\\\\w]*$'" + }, + { + "path": "$", + "message": "{'*foo': '...'} is not of type 'string'" + }, + { + "path": "$", + "message": "{'*foo': '...'} is not of type 'null'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/vars/dash-in-var-name.yml b/test/schemas/negative_test/playbooks/vars/dash-in-var-name.yml new file mode 100644 index 0000000..216de64 --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/dash-in-var-name.yml @@ -0,0 +1,2 @@ +--- +foo-bar: ... 
# invalid var name diff --git a/test/schemas/negative_test/playbooks/vars/dash-in-var-name.yml.md b/test/schemas/negative_test/playbooks/vars/dash-in-var-name.yml.md new file mode 100644 index 0000000..b862e69 --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/dash-in-var-name.yml.md @@ -0,0 +1,77 @@ +# ajv errors + +```json +[ + { + "instancePath": "", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "foo-bar" + }, + "schemaPath": "#/anyOf/0/additionalProperties" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/anyOf/1/type" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be null", + "params": { + "type": "null" + }, + "schemaPath": "#/anyOf/2/type" + }, + { + "instancePath": "", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/vars/dash-in-var-name.yml", + "path": "$", + "message": "{'foo-bar': '...'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$", + "message": "{'foo-bar': '...'} is not of type 'string'" + }, + "sub_errors": [ + { + "path": "$", + "message": "'foo-bar' does not match any of the regexes: '^(?!(False|None|True|and|any_errors_fatal|as|assert|async|await|become|become_exe|become_flags|become_method|become_user|break|check_mode|class|collections|connection|continue|debugger|def|del|diff|elif|else|environment|except|fact_path|finally|for|force_handlers|from|gather_facts|gather_subset|gather_timeout|global|handlers|hosts|if|ignore_errors|ignore_unreachable|import|in|is|lambda|max_fail_percentage|module_defaults|name|no_log|nonlocal|not|or|order|pass|port|post_tasks|pre_tasks|raise|remote_user|return|roles|run_once|serial|strategy|tags|tasks|throttle|timeout|try|vars|vars_files|vars_prompt|while|with|yield)$)[a-zA-Z_][\\\\w]*$'" + }, + { + "path": "$", + "message": "{'foo-bar': '...'} is not of type 'string'" + }, + { + "path": "$", + "message": "{'foo-bar': '...'} is not of type 'null'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/vars/list.yml b/test/schemas/negative_test/playbooks/vars/list.yml new file mode 100644 index 0000000..909a4d7 --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/list.yml @@ -0,0 +1,3 @@ +# invalid vars file, as sequence is not allowed +- foo +- bar diff --git a/test/schemas/negative_test/playbooks/vars/list.yml.md b/test/schemas/negative_test/playbooks/vars/list.yml.md new file mode 100644 index 0000000..e2c9bf5 --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/list.yml.md @@ -0,0 +1,77 @@ +# ajv errors + +```json +[ + { + "instancePath": "", + "keyword": "type", + "message": "must be object", + "params": { + "type": "object" + }, + "schemaPath": "#/anyOf/0/type" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/anyOf/1/type" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be null", + "params": { + "type": "null" + }, + "schemaPath": "#/anyOf/2/type" + }, + { + "instancePath": "", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/anyOf" + } +] +``` + +# 
check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/vars/list.yml", + "path": "$", + "message": "['foo', 'bar'] is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$", + "message": "['foo', 'bar'] is not of type 'object'" + }, + "sub_errors": [ + { + "path": "$", + "message": "['foo', 'bar'] is not of type 'object'" + }, + { + "path": "$", + "message": "['foo', 'bar'] is not of type 'string'" + }, + { + "path": "$", + "message": "['foo', 'bar'] is not of type 'null'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/vars/numeric-var-name.yml b/test/schemas/negative_test/playbooks/vars/numeric-var-name.yml new file mode 100644 index 0000000..826150d --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/numeric-var-name.yml @@ -0,0 +1,2 @@ +--- +12: ... # invalid var name diff --git a/test/schemas/negative_test/playbooks/vars/numeric-var-name.yml.md b/test/schemas/negative_test/playbooks/vars/numeric-var-name.yml.md new file mode 100644 index 0000000..7ddcff6 --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/numeric-var-name.yml.md @@ -0,0 +1,77 @@ +# ajv errors + +```json +[ + { + "instancePath": "", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "12" + }, + "schemaPath": "#/anyOf/0/additionalProperties" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/anyOf/1/type" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be null", + "params": { + "type": "null" + }, + "schemaPath": "#/anyOf/2/type" + }, + { + "instancePath": "", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/vars/numeric-var-name.yml", + "path": "$", + "message": "{'12': '...'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$", + "message": "{'12': '...'} is not of type 'string'" + }, + "sub_errors": [ + { + "path": "$", + "message": "'12' does not match any of the regexes: '^(?!(False|None|True|and|any_errors_fatal|as|assert|async|await|become|become_exe|become_flags|become_method|become_user|break|check_mode|class|collections|connection|continue|debugger|def|del|diff|elif|else|environment|except|fact_path|finally|for|force_handlers|from|gather_facts|gather_subset|gather_timeout|global|handlers|hosts|if|ignore_errors|ignore_unreachable|import|in|is|lambda|max_fail_percentage|module_defaults|name|no_log|nonlocal|not|or|order|pass|port|post_tasks|pre_tasks|raise|remote_user|return|roles|run_once|serial|strategy|tags|tasks|throttle|timeout|try|vars|vars_files|vars_prompt|while|with|yield)$)[a-zA-Z_][\\\\w]*$'" + }, + { + "path": "$", + "message": "{'12': '...'} is not of type 'string'" + }, + { + "path": "$", + "message": "{'12': '...'} is not of type 'null'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/vars/play-keyword.yml b/test/schemas/negative_test/playbooks/vars/play-keyword.yml new file mode 100644 index 0000000..7d277ed --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/play-keyword.yml @@ -0,0 +1,2 @@ +--- +environment: ... 
# invalid var name diff --git a/test/schemas/negative_test/playbooks/vars/play-keyword.yml.md b/test/schemas/negative_test/playbooks/vars/play-keyword.yml.md new file mode 100644 index 0000000..6b88b2a --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/play-keyword.yml.md @@ -0,0 +1,77 @@ +# ajv errors + +```json +[ + { + "instancePath": "", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "environment" + }, + "schemaPath": "#/anyOf/0/additionalProperties" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/anyOf/1/type" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be null", + "params": { + "type": "null" + }, + "schemaPath": "#/anyOf/2/type" + }, + { + "instancePath": "", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/vars/play-keyword.yml", + "path": "$", + "message": "{'environment': '...'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$", + "message": "{'environment': '...'} is not of type 'string'" + }, + "sub_errors": [ + { + "path": "$", + "message": "'environment' does not match any of the regexes: '^(?!(False|None|True|and|any_errors_fatal|as|assert|async|await|become|become_exe|become_flags|become_method|become_user|break|check_mode|class|collections|connection|continue|debugger|def|del|diff|elif|else|environment|except|fact_path|finally|for|force_handlers|from|gather_facts|gather_subset|gather_timeout|global|handlers|hosts|if|ignore_errors|ignore_unreachable|import|in|is|lambda|max_fail_percentage|module_defaults|name|no_log|nonlocal|not|or|order|pass|port|post_tasks|pre_tasks|raise|remote_user|return|roles|run_once|serial|strategy|tags|tasks|throttle|timeout|try|vars|vars_files|vars_prompt|while|with|yield)$)[a-zA-Z_][\\\\w]*$'" + }, + { + "path": "$", + "message": "{'environment': '...'} is not of type 'string'" + }, + { + "path": "$", + "message": "{'environment': '...'} is not of type 'null'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/vars/python-keyword.yml b/test/schemas/negative_test/playbooks/vars/python-keyword.yml new file mode 100644 index 0000000..7b9d01d --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/python-keyword.yml @@ -0,0 +1,3 @@ +--- +async: ... # invalid var name +lambda: ... 
# invalid var name diff --git a/test/schemas/negative_test/playbooks/vars/python-keyword.yml.md b/test/schemas/negative_test/playbooks/vars/python-keyword.yml.md new file mode 100644 index 0000000..ca42f74 --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/python-keyword.yml.md @@ -0,0 +1,86 @@ +# ajv errors + +```json +[ + { + "instancePath": "", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "async" + }, + "schemaPath": "#/anyOf/0/additionalProperties" + }, + { + "instancePath": "", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "lambda" + }, + "schemaPath": "#/anyOf/0/additionalProperties" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/anyOf/1/type" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be null", + "params": { + "type": "null" + }, + "schemaPath": "#/anyOf/2/type" + }, + { + "instancePath": "", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/vars/python-keyword.yml", + "path": "$", + "message": "{'async': '...', 'lambda': '...'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$", + "message": "{'async': '...', 'lambda': '...'} is not of type 'string'" + }, + "sub_errors": [ + { + "path": "$", + "message": "'async', 'lambda' do not match any of the regexes: '^(?!(False|None|True|and|any_errors_fatal|as|assert|async|await|become|become_exe|become_flags|become_method|become_user|break|check_mode|class|collections|connection|continue|debugger|def|del|diff|elif|else|environment|except|fact_path|finally|for|force_handlers|from|gather_facts|gather_subset|gather_timeout|global|handlers|hosts|if|ignore_errors|ignore_unreachable|import|in|is|lambda|max_fail_percentage|module_defaults|name|no_log|nonlocal|not|or|order|pass|port|post_tasks|pre_tasks|raise|remote_user|return|roles|run_once|serial|strategy|tags|tasks|throttle|timeout|try|vars|vars_files|vars_prompt|while|with|yield)$)[a-zA-Z_][\\\\w]*$'" + }, + { + "path": "$", + "message": "{'async': '...', 'lambda': '...'} is not of type 'string'" + }, + { + "path": "$", + "message": "{'async': '...', 'lambda': '...'} is not of type 'null'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/vars/varname-numeric-prefix.yml b/test/schemas/negative_test/playbooks/vars/varname-numeric-prefix.yml new file mode 100644 index 0000000..5f97995 --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/varname-numeric-prefix.yml @@ -0,0 +1,2 @@ +--- +5foo: ... 
# invalid var name diff --git a/test/schemas/negative_test/playbooks/vars/varname-numeric-prefix.yml.md b/test/schemas/negative_test/playbooks/vars/varname-numeric-prefix.yml.md new file mode 100644 index 0000000..8b73b0a --- /dev/null +++ b/test/schemas/negative_test/playbooks/vars/varname-numeric-prefix.yml.md @@ -0,0 +1,77 @@ +# ajv errors + +```json +[ + { + "instancePath": "", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "5foo" + }, + "schemaPath": "#/anyOf/0/additionalProperties" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be string", + "params": { + "type": "string" + }, + "schemaPath": "#/anyOf/1/type" + }, + { + "instancePath": "", + "keyword": "type", + "message": "must be null", + "params": { + "type": "null" + }, + "schemaPath": "#/anyOf/2/type" + }, + { + "instancePath": "", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/vars/varname-numeric-prefix.yml", + "path": "$", + "message": "{'5foo': '...'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$", + "message": "{'5foo': '...'} is not of type 'string'" + }, + "sub_errors": [ + { + "path": "$", + "message": "'5foo' does not match any of the regexes: '^(?!(False|None|True|and|any_errors_fatal|as|assert|async|await|become|become_exe|become_flags|become_method|become_user|break|check_mode|class|collections|connection|continue|debugger|def|del|diff|elif|else|environment|except|fact_path|finally|for|force_handlers|from|gather_facts|gather_subset|gather_timeout|global|handlers|hosts|if|ignore_errors|ignore_unreachable|import|in|is|lambda|max_fail_percentage|module_defaults|name|no_log|nonlocal|not|or|order|pass|port|post_tasks|pre_tasks|raise|remote_user|return|roles|run_once|serial|strategy|tags|tasks|throttle|timeout|try|vars|vars_files|vars_prompt|while|with|yield)$)[a-zA-Z_][\\\\w]*$'" + }, + { + "path": "$", + "message": "{'5foo': '...'} is not of type 'string'" + }, + { + "path": "$", + "message": "{'5foo': '...'} is not of type 'null'" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/playbooks/vas_prompt.yml b/test/schemas/negative_test/playbooks/vas_prompt.yml new file mode 100644 index 0000000..a90d131 --- /dev/null +++ b/test/schemas/negative_test/playbooks/vas_prompt.yml @@ -0,0 +1,7 @@ +- hosts: localhost + vars_prompt: + - name: username + prompt: What is your username? 
+ private: false + tags: # tags were never supported, https://github.com/ansible/ansible/issues/1780 + - foo diff --git a/test/schemas/negative_test/playbooks/vas_prompt.yml.md b/test/schemas/negative_test/playbooks/vas_prompt.yml.md new file mode 100644 index 0000000..d2d809d --- /dev/null +++ b/test/schemas/negative_test/playbooks/vas_prompt.yml.md @@ -0,0 +1,118 @@ +# ajv errors + +```json +[ + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'ansible.builtin.import_playbook'", + "params": { + "missingProperty": "ansible.builtin.import_playbook" + }, + "schemaPath": "#/oneOf/0/required" + }, + { + "instancePath": "/0", + "keyword": "required", + "message": "must have required property 'import_playbook'", + "params": { + "missingProperty": "import_playbook" + }, + "schemaPath": "#/oneOf/1/required" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/oneOf" + }, + { + "instancePath": "/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "hosts" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "/0/vars_prompt", + "keyword": "type", + "message": "must be object", + "params": { + "type": "object" + }, + "schemaPath": "#/patternProperties/vars/type" + }, + { + "instancePath": "/0/vars_prompt/0", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "tags" + }, + "schemaPath": "#/$defs/vars_prompt/additionalProperties" + }, + { + "instancePath": "/0", + "keyword": "oneOf", + "message": "must match exactly one schema in oneOf", + "params": { + "passingSchemas": null + }, + "schemaPath": "#/items/oneOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/playbooks/vas_prompt.yml", + "path": "$[0]", + "message": "{'hosts': 'localhost', 'vars_prompt': [{'name': 'username', 'prompt': 'What is your username?', 'private': False, 'tags': ['foo']}]} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$[0]", + "message": "'hosts' does not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + "sub_errors": [ + { + "path": "$[0]", + "message": "'hosts' does not match any of the regexes: '^(ansible\\\\.builtin\\\\.)?import_playbook$', 'name', 'tags', 'vars', 'when'" + }, + { + "path": "$[0]", + "message": "{'hosts': 'localhost', 'vars_prompt': [{'name': 'username', 'prompt': 'What is your username?', 'private': False, 'tags': ['foo']}]} is not valid under any of the given schemas" + }, + { + "path": "$[0]", + "message": "'ansible.builtin.import_playbook' is a required property" + }, + { + "path": "$[0]", + "message": "'import_playbook' is a required property" + }, + { + "path": "$[0].vars_prompt", + "message": "[{'name': 'username', 'prompt': 'What is your username?', 'private': False, 'tags': ['foo']}] is not of type 'object'" + }, + { + "path": "$[0].vars_prompt[0]", + "message": "Additional properties are not allowed ('tags' was unexpected)" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/reqs3/meta/requirements.yml b/test/schemas/negative_test/reqs3/meta/requirements.yml new file mode 100644 index 0000000..f28aebb --- /dev/null +++ 
b/test/schemas/negative_test/reqs3/meta/requirements.yml @@ -0,0 +1,2 @@ +# this should fail validation +foo: bar diff --git a/test/schemas/negative_test/reqs3/meta/requirements.yml.md b/test/schemas/negative_test/reqs3/meta/requirements.yml.md new file mode 100644 index 0000000..5de6643 --- /dev/null +++ b/test/schemas/negative_test/reqs3/meta/requirements.yml.md @@ -0,0 +1,101 @@ +# ajv errors + +```json +[ + { + "instancePath": "", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/anyOf/0/type" + }, + { + "instancePath": "", + "keyword": "required", + "message": "must have required property 'collections'", + "params": { + "missingProperty": "collections" + }, + "schemaPath": "#/anyOf/0/required" + }, + { + "instancePath": "", + "keyword": "required", + "message": "must have required property 'roles'", + "params": { + "missingProperty": "roles" + }, + "schemaPath": "#/anyOf/1/required" + }, + { + "instancePath": "", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/anyOf" + }, + { + "instancePath": "", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "foo" + }, + "schemaPath": "#/additionalProperties" + }, + { + "instancePath": "", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/anyOf" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/reqs3/meta/requirements.yml", + "path": "$", + "message": "{'foo': 'bar'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$", + "message": "{'foo': 'bar'} is not of type 'array'" + }, + "sub_errors": [ + { + "path": "$", + "message": "{'foo': 'bar'} is not of type 'array'" + }, + { + "path": "$", + "message": "Additional properties are not allowed ('foo' was unexpected)" + }, + { + "path": "$", + "message": "{'foo': 'bar'} is not valid under any of the given schemas" + }, + { + "path": "$", + "message": "'collections' is a required property" + }, + { + "path": "$", + "message": "'roles' is a required property" + } + ] + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/roles/meta/argument_specs.yml b/test/schemas/negative_test/roles/meta/argument_specs.yml new file mode 100644 index 0000000..ddc9862 --- /dev/null +++ b/test/schemas/negative_test/roles/meta/argument_specs.yml @@ -0,0 +1,5 @@ +--- +argument_specs: + main: + foo: bar # <-- invalid based on json schema + options: {} diff --git a/test/schemas/negative_test/roles/meta/argument_specs.yml.md b/test/schemas/negative_test/roles/meta/argument_specs.yml.md new file mode 100644 index 0000000..34da932 --- /dev/null +++ b/test/schemas/negative_test/roles/meta/argument_specs.yml.md @@ -0,0 +1,34 @@ +# ajv errors + +```json +[ + { + "instancePath": "/argument_specs/main", + "keyword": "additionalProperties", + "message": "must NOT have additional properties", + "params": { + "additionalProperty": "foo" + }, + "schemaPath": "#/additionalProperties" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/roles/meta/argument_specs.yml", + "path": "$.argument_specs.main", + "message": "Additional properties are not allowed ('foo' was unexpected)", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git 
a/test/schemas/negative_test/roles/meta/main.yml b/test/schemas/negative_test/roles/meta/main.yml new file mode 100644 index 0000000..3ed9a8c --- /dev/null +++ b/test/schemas/negative_test/roles/meta/main.yml @@ -0,0 +1,10 @@ +galaxy_info: + description: bar + min_ansible_version: "2.9" + company: foo + license: MIT + galaxy_tags: database # <-- invalid, must be a list of strings + platforms: + - name: Alpine + versions: + - all diff --git a/test/schemas/negative_test/roles/meta/main.yml.md b/test/schemas/negative_test/roles/meta/main.yml.md new file mode 100644 index 0000000..2c9e99b --- /dev/null +++ b/test/schemas/negative_test/roles/meta/main.yml.md @@ -0,0 +1,58 @@ +# ajv errors + +```json +[ + { + "instancePath": "/galaxy_info", + "keyword": "required", + "message": "must have required property 'author'", + "params": { + "missingProperty": "author" + }, + "schemaPath": "#/allOf/0/then/required" + }, + { + "instancePath": "/galaxy_info", + "keyword": "if", + "message": "must match \"then\" schema", + "params": { + "failingKeyword": "then" + }, + "schemaPath": "#/allOf/0/if" + }, + { + "instancePath": "/galaxy_info/galaxy_tags", + "keyword": "type", + "message": "must be array", + "params": { + "type": "array" + }, + "schemaPath": "#/properties/galaxy_tags/type" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/roles/meta/main.yml", + "path": "$.galaxy_info", + "message": "'author' is a required property", + "has_sub_errors": false + }, + { + "filename": "negative_test/roles/meta/main.yml", + "path": "$.galaxy_info.galaxy_tags", + "message": "'database' is not of type 'array'", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/roles/meta_invalid_collection/meta/main.yml b/test/schemas/negative_test/roles/meta_invalid_collection/meta/main.yml new file mode 100644 index 0000000..1fa41eb --- /dev/null +++ b/test/schemas/negative_test/roles/meta_invalid_collection/meta/main.yml @@ -0,0 +1,10 @@ +collections: + - foo # invalid pattern +galaxy_info: + standalone: false # role inside a collection + description: foo + license: bar + platforms: + - name: Fedora + versions: + - all diff --git a/test/schemas/negative_test/roles/meta_invalid_collection/meta/main.yml.md b/test/schemas/negative_test/roles/meta_invalid_collection/meta/main.yml.md new file mode 100644 index 0000000..1b8dcd0 --- /dev/null +++ b/test/schemas/negative_test/roles/meta_invalid_collection/meta/main.yml.md @@ -0,0 +1,34 @@ +# ajv errors + +```json +[ + { + "instancePath": "/collections/0", + "keyword": "pattern", + "message": "must match pattern \"^[a-z_]+\\.[a-z_]+$\"", + "params": { + "pattern": "^[a-z_]+\\.[a-z_]+$" + }, + "schemaPath": "#/$defs/collections/items/pattern" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/roles/meta_invalid_collection/meta/main.yml", + "path": "$.collections[0]", + "message": "'foo' does not match '^[a-z_]+\\\\.[a-z_]+$'", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/roles/meta_invalid_collections/meta/main.yml b/test/schemas/negative_test/roles/meta_invalid_collections/meta/main.yml new file mode 100644 index 0000000..488928c --- /dev/null +++ b/test/schemas/negative_test/roles/meta_invalid_collections/meta/main.yml @@ -0,0 +1,11 @@ +# role inside a collection +collections: + - FOO.BAR # invalid pattern, need to use lowercase 
+galaxy_info: + standalone: false + description: foo + license: bar + platforms: + - name: Fedora + versions: + - all diff --git a/test/schemas/negative_test/roles/meta_invalid_collections/meta/main.yml.md b/test/schemas/negative_test/roles/meta_invalid_collections/meta/main.yml.md new file mode 100644 index 0000000..5d775f0 --- /dev/null +++ b/test/schemas/negative_test/roles/meta_invalid_collections/meta/main.yml.md @@ -0,0 +1,34 @@ +# ajv errors + +```json +[ + { + "instancePath": "/collections/0", + "keyword": "pattern", + "message": "must match pattern \"^[a-z_]+\\.[a-z_]+$\"", + "params": { + "pattern": "^[a-z_]+\\.[a-z_]+$" + }, + "schemaPath": "#/$defs/collections/items/pattern" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/roles/meta_invalid_collections/meta/main.yml", + "path": "$.collections[0]", + "message": "'FOO.BAR' does not match '^[a-z_]+\\\\.[a-z_]+$'", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/roles/meta_invalid_role_namespace/meta/main.yml b/test/schemas/negative_test/roles/meta_invalid_role_namespace/meta/main.yml new file mode 100644 index 0000000..e50e5b7 --- /dev/null +++ b/test/schemas/negative_test/roles/meta_invalid_role_namespace/meta/main.yml @@ -0,0 +1,12 @@ +--- +# old standalone role +galaxy_info: + description: foo + min_ansible_version: "2.9" + namespace: foo-bar + company: foo + license: MIT + platforms: + - name: Alpine + versions: + - all diff --git a/test/schemas/negative_test/roles/meta_invalid_role_namespace/meta/main.yml.md b/test/schemas/negative_test/roles/meta_invalid_role_namespace/meta/main.yml.md new file mode 100644 index 0000000..ad7e9d3 --- /dev/null +++ b/test/schemas/negative_test/roles/meta_invalid_role_namespace/meta/main.yml.md @@ -0,0 +1,58 @@ +# ajv errors + +```json +[ + { + "instancePath": "/galaxy_info", + "keyword": "required", + "message": "must have required property 'author'", + "params": { + "missingProperty": "author" + }, + "schemaPath": "#/allOf/0/then/required" + }, + { + "instancePath": "/galaxy_info", + "keyword": "if", + "message": "must match \"then\" schema", + "params": { + "failingKeyword": "then" + }, + "schemaPath": "#/allOf/0/if" + }, + { + "instancePath": "/galaxy_info/namespace", + "keyword": "pattern", + "message": "must match pattern \"^[a-z][a-z0-9_]+$\"", + "params": { + "pattern": "^[a-z][a-z0-9_]+$" + }, + "schemaPath": "#/properties/namespace/pattern" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/roles/meta_invalid_role_namespace/meta/main.yml", + "path": "$.galaxy_info", + "message": "'author' is a required property", + "has_sub_errors": false + }, + { + "filename": "negative_test/roles/meta_invalid_role_namespace/meta/main.yml", + "path": "$.galaxy_info.namespace", + "message": "'foo-bar' does not match '^[a-z][a-z0-9_]+$'", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/negative_test/roles/role_with_bad_deps_in_meta/meta/main.yml b/test/schemas/negative_test/roles/role_with_bad_deps_in_meta/meta/main.yml new file mode 100644 index 0000000..81d4d3d --- /dev/null +++ b/test/schemas/negative_test/roles/role_with_bad_deps_in_meta/meta/main.yml @@ -0,0 +1,13 @@ +# old standalone role +galaxy_info: + description: bar + min_ansible_version: "2.9" + company: foo + license: MIT + platforms: + - name: Alpine + versions: + - all + +dependencies: + - 
version: foo # invalid, should have at least name, role or src properties diff --git a/test/schemas/negative_test/roles/role_with_bad_deps_in_meta/meta/main.yml.md b/test/schemas/negative_test/roles/role_with_bad_deps_in_meta/meta/main.yml.md new file mode 100644 index 0000000..f09b1ac --- /dev/null +++ b/test/schemas/negative_test/roles/role_with_bad_deps_in_meta/meta/main.yml.md @@ -0,0 +1,101 @@ +# ajv errors + +```json +[ + { + "instancePath": "/dependencies/0", + "keyword": "required", + "message": "must have required property 'role'", + "params": { + "missingProperty": "role" + }, + "schemaPath": "#/anyOf/0/required" + }, + { + "instancePath": "/dependencies/0", + "keyword": "required", + "message": "must have required property 'src'", + "params": { + "missingProperty": "src" + }, + "schemaPath": "#/anyOf/1/required" + }, + { + "instancePath": "/dependencies/0", + "keyword": "required", + "message": "must have required property 'name'", + "params": { + "missingProperty": "name" + }, + "schemaPath": "#/anyOf/2/required" + }, + { + "instancePath": "/dependencies/0", + "keyword": "anyOf", + "message": "must match a schema in anyOf", + "params": {}, + "schemaPath": "#/anyOf" + }, + { + "instancePath": "/galaxy_info", + "keyword": "required", + "message": "must have required property 'author'", + "params": { + "missingProperty": "author" + }, + "schemaPath": "#/allOf/0/then/required" + }, + { + "instancePath": "/galaxy_info", + "keyword": "if", + "message": "must match \"then\" schema", + "params": { + "failingKeyword": "then" + }, + "schemaPath": "#/allOf/0/if" + } +] +``` + +# check-jsonschema + +stdout: + +```json +{ + "status": "fail", + "errors": [ + { + "filename": "negative_test/roles/role_with_bad_deps_in_meta/meta/main.yml", + "path": "$.dependencies[0]", + "message": "{'version': 'foo'} is not valid under any of the given schemas", + "has_sub_errors": true, + "best_match": { + "path": "$.dependencies[0]", + "message": "'role' is a required property" + }, + "sub_errors": [ + { + "path": "$.dependencies[0]", + "message": "'role' is a required property" + }, + { + "path": "$.dependencies[0]", + "message": "'src' is a required property" + }, + { + "path": "$.dependencies[0]", + "message": "'name' is a required property" + } + ] + }, + { + "filename": "negative_test/roles/role_with_bad_deps_in_meta/meta/main.yml", + "path": "$.galaxy_info", + "message": "'author' is a required property", + "has_sub_errors": false + } + ], + "parse_errors": [] +} +``` diff --git a/test/schemas/package-lock.json b/test/schemas/package-lock.json new file mode 100644 index 0000000..5e110a3 --- /dev/null +++ b/test/schemas/package-lock.json @@ -0,0 +1,2447 @@ +{ + "name": "schemas", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "dependencies": { + "ajv-cli": "^5.0.0", + "ajv-formats": "^2.1.1", + "js-yaml": "^4.1.0", + "safe-stable-stringify": "^2.4.2", + "ts-node": "^10.9.1", + "vscode-json-languageservice": "^5.2.0" + }, + "devDependencies": { + "@types/chai": "^4.3.4", + "@types/js-yaml": "^4.0.5", + "@types/minimatch": "^5.1.2", + "@types/mocha": "^10.0.1", + "@types/node": "^18.13.0", + "chai": "^4.3.7", + "minimatch": "^6.2.0", + "mocha": "^10.2.0", + "typescript": "^4.9.5" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + 
"dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz", + "integrity": "sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.13", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz", + "integrity": "sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w==" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.8.tgz", + "integrity": "sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg==" + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.9.tgz", + "integrity": "sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw==" + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.1.tgz", + "integrity": "sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg==" + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.2.tgz", + "integrity": "sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA==" + }, + "node_modules/@types/chai": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.4.tgz", + "integrity": "sha512-KnRanxnpfpjUTqTCXslZSEdLfXExwgNxYPdiO2WGUj8+HDjFi8R3k5RVKPeSCzLjCcshCAtVO2QBbVuAV4kTnw==", + "dev": true + }, + "node_modules/@types/js-yaml": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.5.tgz", + "integrity": "sha512-FhpRzf927MNQdRZP0J5DLIdTXhjLYzeUTmLAu69mnVksLH9CJY3IuSeEgbKUki7GQZm0WqDkGzyxju2EZGD2wA==", + "dev": true + }, + "node_modules/@types/minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", + "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==", + "dev": true + }, + "node_modules/@types/mocha": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.1.tgz", + "integrity": "sha512-/fvYntiO1GeICvqbQ3doGDIP97vWmvFt83GKguJ6prmQM2iXZfFcq6YE8KteFyRtX2/h5Hf91BYvPodJKFYv5Q==", + "dev": true + }, + "node_modules/@types/node": { + "version": "18.13.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.13.0.tgz", + "integrity": "sha512-gC3TazRzGoOnoKAhUx+Q0t8S9Tzs74z7m0ipwGpSqQrleP14hKxP4/JUeEQcD3W1/aIpnWl8pHowI7WokuZpXg==" + }, + "node_modules/@vscode/l10n": { + "version": "0.0.11", + "resolved": 
"https://registry.npmjs.org/@vscode/l10n/-/l10n-0.0.11.tgz", + "integrity": "sha512-ukOMWnCg1tCvT7WnDfsUKQOFDQGsyR5tNgRpwmqi+5/vzU3ghdDXzvIM4IOPdSb3OeSsBNvmSL8nxIVOqi2WXA==" + }, + "node_modules/acorn": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.6.0.tgz", + "integrity": "sha512-U1riIR+lBSNi3IbxtaHOIKdH8sLFv3NYfNv8sg7ZsNhcfl4HF2++BfqqrNAxoCLQW1iiylOj76ecnaUxz+z9yw==", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ajv": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.10.0.tgz", + "integrity": "sha512-bzqAEZOjkrUMl2afH8dknrq5KEk2SrwdBROR+vH1EKVQTqaUbJVPdc/gEdggTMM0Se+s+Ja4ju4TlNcStKl2Hw==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-cli": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ajv-cli/-/ajv-cli-5.0.0.tgz", + "integrity": "sha512-LY4m6dUv44HTyhV+u2z5uX4EhPYTM38Iv1jdgDJJJCyOOuqB8KtZEGjPZ2T+sh5ZIJrXUfgErYx/j3gLd3+PlQ==", + "dependencies": { + "ajv": "^8.0.0", + "fast-json-patch": "^2.0.0", + "glob": "^7.1.0", + "js-yaml": "^3.14.0", + "json-schema-migrate": "^2.0.0", + "json5": "^2.1.3", + "minimist": "^1.2.0" + }, + "bin": { + "ajv": "dist/index.js" + }, + "peerDependencies": { + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "ts-node": { + "optional": true + } + } + }, + "node_modules/ajv-cli/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/ajv-cli/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/ajv-formats": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", + "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } 
+ }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==" + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "node_modules/camelcase": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", + "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chai": { + "version": "4.3.7", 
+ "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.7.tgz", + "integrity": "sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^4.1.2", + "get-func-name": "^2.0.0", + "loupe": "^2.3.1", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + ], + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": 
"sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==" + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/deep-eql": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz", + "integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "node_modules/fast-json-patch": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/fast-json-patch/-/fast-json-patch-2.2.1.tgz", + "integrity": 
"sha512-4j5uBaTnsYAV5ebkidvxiLUYOwjQ+JSFljeqfTxCrH9bDmlCQaOJFS84oDJ2rAXZq2yskmk3ORfoP9DCwqFNig==", + "dependencies": { + "fast-deep-equal": "^2.0.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/fast-json-patch/node_modules/fast-deep-equal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", + "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "bin": { + "flat": "cli.js" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + 
"node_modules/glob/node_modules/minimatch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.1.tgz", + "integrity": "sha512-reLxBcKUPNBnc/sVtAbxgRVFSegoGeLaSjmphNhcwcolhYLRgtJscn5mRl6YRZNQv40Y7P6JM2YhSIsbL9OB5A==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "bin": { + "he": "bin/he" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": 
"sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-schema-migrate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/json-schema-migrate/-/json-schema-migrate-2.0.0.tgz", + "integrity": "sha512-r38SVTtojDRp4eD6WsCqiE0eNDt4v1WalBXb9cyZYw9ai5cGtBwzRNWjHzJl38w6TxFkXAIA7h+fyX3tnrAFhQ==", + "dependencies": { + "ajv": "^8.0.0" + } + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + }, + "node_modules/json5": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.2.tgz", + "integrity": "sha512-46Tk9JiOL2z7ytNQWFLpj99RZkVgeHf87yGQKsIkaPz1qSH9UczKH1rO7K3wgRselo0tYMUNfecYpm/p1vC7tQ==", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonc-parser": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz", + "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==" + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/loupe": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.4.tgz", + "integrity": "sha512-OvKfgCC2Ndby6aSTREl5aCCPTNIzlDfQZvZxNUrBrihDhL3xcrYegTblhmEiCrg2kKQz4XsFIaemE5BF4ybSaQ==", + "dev": true, + "dependencies": { + "get-func-name": "^2.0.0" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==" + }, + "node_modules/minimatch": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-6.2.0.tgz", + "integrity": "sha512-sauLxniAmvnhhRjFwPNnJKaPFYyddAgbYdeUpHULtCT/GhzdCx/MDNy+Y40lBxTQUrMzDE8e0S43Z5uqfO0REg==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimatch/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/minimist": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" + }, + "node_modules/mocha": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", + "integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==", + "dev": true, + "dependencies": { + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.5.3", + "debug": "4.3.4", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.2.0", + "he": "1.2.0", + "js-yaml": "4.1.0", + "log-symbols": "4.1.0", + "minimatch": "5.0.1", + "ms": "2.1.3", + "nanoid": "3.3.3", + "serialize-javascript": "6.0.0", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "workerpool": "6.2.1", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha.js" + }, + "engines": { + "node": ">= 14.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/mochajs" + } + }, + "node_modules/mocha/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/mocha/node_modules/minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/nanoid": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz", + "integrity": "sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==", + "dev": true, + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", 
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/safe-buffer": { + 
"version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safe-stable-stringify": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.4.2.tgz", + "integrity": "sha512-gMxvPJYhP0O9n2pvcfYfIuYgbledAOJFcqRThtPRmjscaipiwcwPPKLytpVzMkG2HAN87Qmo2d4PtGiri1dSLA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-node": { + "version": "10.9.1", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz", + "integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==", + "dependencies": { 
+ "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/ts-node/node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/typescript": { + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", + "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==" + }, + "node_modules/vscode-json-languageservice": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/vscode-json-languageservice/-/vscode-json-languageservice-5.2.0.tgz", + "integrity": "sha512-q8Rdhu2HEddRxvlhVqwh0cWmKK+OtyMB2xRhtqXEQ7cjb0iZ14madb90iJe9fCHPjoj9CGBrq6QzuOp8OE6XWg==", + "dependencies": { + "@vscode/l10n": "^0.0.11", + "jsonc-parser": "^3.2.0", + "vscode-languageserver-textdocument": "^1.0.8", + "vscode-languageserver-types": "^3.17.2", + "vscode-uri": "^3.0.7" + } + }, + "node_modules/vscode-languageserver-textdocument": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.8.tgz", + "integrity": "sha512-1bonkGqQs5/fxGT5UchTgjGVnfysL0O8v1AYMBjqTbWQTFn721zaPGDYFkOKtfDgFiSgXM3KwaG3FMGfW4Ed9Q==" + }, + "node_modules/vscode-languageserver-types": { + "version": "3.17.2", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.2.tgz", + "integrity": "sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA==" + }, + "node_modules/vscode-uri": { + 
"version": "3.0.7", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.7.tgz", + "integrity": "sha512-eOpPHogvorZRobNqJGhapa0JdwaxpjVvyBp0QIUMRMSf8ZAlqOdEquKuRmw9Qwu0qXtJIWqFtMkmvJjUZmMjVA==" + }, + "node_modules/workerpool": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", + "integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==", + "dev": true + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "dependencies": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "engines": { + "node": ">=6" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + }, + "dependencies": { + "@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": 
"sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "requires": { + "@jridgewell/trace-mapping": "0.3.9" + } + }, + "@jridgewell/resolve-uri": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz", + "integrity": "sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA==" + }, + "@jridgewell/sourcemap-codec": { + "version": "1.4.13", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz", + "integrity": "sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w==" + }, + "@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "requires": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "@tsconfig/node10": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.8.tgz", + "integrity": "sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg==" + }, + "@tsconfig/node12": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.9.tgz", + "integrity": "sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw==" + }, + "@tsconfig/node14": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.1.tgz", + "integrity": "sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg==" + }, + "@tsconfig/node16": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.2.tgz", + "integrity": "sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA==" + }, + "@types/chai": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.3.4.tgz", + "integrity": "sha512-KnRanxnpfpjUTqTCXslZSEdLfXExwgNxYPdiO2WGUj8+HDjFi8R3k5RVKPeSCzLjCcshCAtVO2QBbVuAV4kTnw==", + "dev": true + }, + "@types/js-yaml": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.5.tgz", + "integrity": "sha512-FhpRzf927MNQdRZP0J5DLIdTXhjLYzeUTmLAu69mnVksLH9CJY3IuSeEgbKUki7GQZm0WqDkGzyxju2EZGD2wA==", + "dev": true + }, + "@types/minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", + "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==", + "dev": true + }, + "@types/mocha": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-10.0.1.tgz", + "integrity": "sha512-/fvYntiO1GeICvqbQ3doGDIP97vWmvFt83GKguJ6prmQM2iXZfFcq6YE8KteFyRtX2/h5Hf91BYvPodJKFYv5Q==", + "dev": true + }, + "@types/node": { + "version": "18.13.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.13.0.tgz", + "integrity": "sha512-gC3TazRzGoOnoKAhUx+Q0t8S9Tzs74z7m0ipwGpSqQrleP14hKxP4/JUeEQcD3W1/aIpnWl8pHowI7WokuZpXg==" + }, + "@vscode/l10n": { + "version": "0.0.11", + "resolved": "https://registry.npmjs.org/@vscode/l10n/-/l10n-0.0.11.tgz", + "integrity": "sha512-ukOMWnCg1tCvT7WnDfsUKQOFDQGsyR5tNgRpwmqi+5/vzU3ghdDXzvIM4IOPdSb3OeSsBNvmSL8nxIVOqi2WXA==" + }, + "acorn": { + 
"version": "8.6.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.6.0.tgz", + "integrity": "sha512-U1riIR+lBSNi3IbxtaHOIKdH8sLFv3NYfNv8sg7ZsNhcfl4HF2++BfqqrNAxoCLQW1iiylOj76ecnaUxz+z9yw==" + }, + "acorn-walk": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==" + }, + "ajv": { + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.10.0.tgz", + "integrity": "sha512-bzqAEZOjkrUMl2afH8dknrq5KEk2SrwdBROR+vH1EKVQTqaUbJVPdc/gEdggTMM0Se+s+Ja4ju4TlNcStKl2Hw==", + "requires": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + } + }, + "ajv-cli": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ajv-cli/-/ajv-cli-5.0.0.tgz", + "integrity": "sha512-LY4m6dUv44HTyhV+u2z5uX4EhPYTM38Iv1jdgDJJJCyOOuqB8KtZEGjPZ2T+sh5ZIJrXUfgErYx/j3gLd3+PlQ==", + "requires": { + "ajv": "^8.0.0", + "fast-json-patch": "^2.0.0", + "glob": "^7.1.0", + "js-yaml": "^3.14.0", + "json-schema-migrate": "^2.0.0", + "json5": "^2.1.3", + "minimist": "^1.2.0" + }, + "dependencies": { + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + } + } + }, + "ajv-formats": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", + "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", + "requires": { + "ajv": "^8.0.0" + } + }, + "ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "anymatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==" + }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", 
+ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "camelcase": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", + "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", + "dev": true + }, + "chai": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.7.tgz", + "integrity": "sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A==", + "dev": true, + "requires": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^4.1.2", + "get-func-name": "^2.0.0", + "loupe": "^2.3.1", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "dependencies": { + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true + }, + "chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "dev": true, + "requires": 
{ + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "fsevents": "~2.3.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + } + }, + "cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==" + }, + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "requires": { + "ms": "2.1.2" + }, + "dependencies": { + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, + "decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true + }, + "deep-eql": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz", + "integrity": "sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==", + "dev": true, + "requires": { + "type-detect": "^4.0.0" + } + }, + "diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": 
"sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "fast-json-patch": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/fast-json-patch/-/fast-json-patch-2.2.1.tgz", + "integrity": "sha512-4j5uBaTnsYAV5ebkidvxiLUYOwjQ+JSFljeqfTxCrH9bDmlCQaOJFS84oDJ2rAXZq2yskmk3ORfoP9DCwqFNig==", + "requires": { + "fast-deep-equal": "^2.0.1" + }, + "dependencies": { + "fast-deep-equal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", + "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" + } + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + } + }, + "flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true + }, + "glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "dependencies": { + "minimatch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.1.tgz", + "integrity": "sha512-reLxBcKUPNBnc/sVtAbxgRVFSegoGeLaSjmphNhcwcolhYLRgtJscn5mRl6YRZNQv40Y7P6JM2YhSIsbL9OB5A==", + "requires": { + "brace-expansion": "^1.1.7" + } + } + } 
+ }, + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true + }, + "is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "requires": { + "argparse": "^2.0.1" + } + }, + "json-schema-migrate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/json-schema-migrate/-/json-schema-migrate-2.0.0.tgz", + "integrity": 
"sha512-r38SVTtojDRp4eD6WsCqiE0eNDt4v1WalBXb9cyZYw9ai5cGtBwzRNWjHzJl38w6TxFkXAIA7h+fyX3tnrAFhQ==", + "requires": { + "ajv": "^8.0.0" + } + }, + "json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + }, + "json5": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.2.tgz", + "integrity": "sha512-46Tk9JiOL2z7ytNQWFLpj99RZkVgeHf87yGQKsIkaPz1qSH9UczKH1rO7K3wgRselo0tYMUNfecYpm/p1vC7tQ==" + }, + "jsonc-parser": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz", + "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==" + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" + } + }, + "log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "requires": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + } + }, + "loupe": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-2.3.4.tgz", + "integrity": "sha512-OvKfgCC2Ndby6aSTREl5aCCPTNIzlDfQZvZxNUrBrihDhL3xcrYegTblhmEiCrg2kKQz4XsFIaemE5BF4ybSaQ==", + "dev": true, + "requires": { + "get-func-name": "^2.0.0" + } + }, + "make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==" + }, + "minimatch": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-6.2.0.tgz", + "integrity": "sha512-sauLxniAmvnhhRjFwPNnJKaPFYyddAgbYdeUpHULtCT/GhzdCx/MDNy+Y40lBxTQUrMzDE8e0S43Z5uqfO0REg==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + } + } + }, + "minimist": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" + }, + "mocha": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", + "integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==", + "dev": true, + "requires": { + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.5.3", + "debug": "4.3.4", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.2.0", + "he": "1.2.0", + "js-yaml": "4.1.0", + "log-symbols": "4.1.0", + "minimatch": "5.0.1", + "ms": "2.1.3", + "nanoid": "3.3.3", + "serialize-javascript": "6.0.0", + "strip-json-comments": "3.1.1", + "supports-color": 
"8.1.1", + "workerpool": "6.2.1", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + }, + "minimatch": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", + "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + } + } + } + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "nanoid": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz", + "integrity": "sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==", + "dev": true + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "requires": { + "wrappy": "1" + } + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + }, + "pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + 
"integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "requires": { + "picomatch": "^2.2.1" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, + "require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==" + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + }, + "safe-stable-stringify": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.4.2.tgz", + "integrity": "sha512-gMxvPJYhP0O9n2pvcfYfIuYgbledAOJFcqRThtPRmjscaipiwcwPPKLytpVzMkG2HAN87Qmo2d4PtGiri1dSLA==" + }, + "serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "ts-node": { + "version": 
"10.9.1", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz", + "integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==", + "requires": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "dependencies": { + "diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==" + } + } + }, + "type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true + }, + "typescript": { + "version": "4.9.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz", + "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==" + }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "requires": { + "punycode": "^2.1.0" + } + }, + "v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==" + }, + "vscode-json-languageservice": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/vscode-json-languageservice/-/vscode-json-languageservice-5.2.0.tgz", + "integrity": "sha512-q8Rdhu2HEddRxvlhVqwh0cWmKK+OtyMB2xRhtqXEQ7cjb0iZ14madb90iJe9fCHPjoj9CGBrq6QzuOp8OE6XWg==", + "requires": { + "@vscode/l10n": "^0.0.11", + "jsonc-parser": "^3.2.0", + "vscode-languageserver-textdocument": "^1.0.8", + "vscode-languageserver-types": "^3.17.2", + "vscode-uri": "^3.0.7" + } + }, + "vscode-languageserver-textdocument": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.8.tgz", + "integrity": "sha512-1bonkGqQs5/fxGT5UchTgjGVnfysL0O8v1AYMBjqTbWQTFn721zaPGDYFkOKtfDgFiSgXM3KwaG3FMGfW4Ed9Q==" + }, + "vscode-languageserver-types": { + "version": "3.17.2", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.2.tgz", + "integrity": "sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA==" + }, + "vscode-uri": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.7.tgz", + "integrity": "sha512-eOpPHogvorZRobNqJGhapa0JdwaxpjVvyBp0QIUMRMSf8ZAlqOdEquKuRmw9Qwu0qXtJIWqFtMkmvJjUZmMjVA==" + }, + "workerpool": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", + "integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==", + "dev": true + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": 
"sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true + }, + "yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "requires": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + } + }, + "yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true + }, + "yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "requires": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + } + }, + "yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==" + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true + } + } +} diff --git a/test/schemas/package.json b/test/schemas/package.json new file mode 100644 index 0000000..97a3850 --- /dev/null +++ b/test/schemas/package.json @@ -0,0 +1,29 @@ +{ + "dependencies": { + "ajv-cli": "^5.0.0", + "ajv-formats": "^2.1.1", + "js-yaml": "^4.1.0", + "safe-stable-stringify": "^2.4.2", + "ts-node": "^10.9.1", + "vscode-json-languageservice": "^5.2.0" + }, + "scripts": { + "compile": "tsc -p ./src", + "deps": "npx --yes npm-check-updates -u && npm install --ignore-scripts", + "test": "python3 src/rebuild.py && mocha" + }, + "devDependencies": { + "@types/chai": "^4.3.4", + "@types/js-yaml": "^4.0.5", + "@types/minimatch": "^5.1.2", + "@types/mocha": "^10.0.1", + "@types/node": "^18.13.0", + "chai": "^4.3.7", + "minimatch": "^6.2.0", + "mocha": "^10.2.0", + "typescript": "^4.9.5" + }, + "directories": { + "test": "./src" + } +} diff --git a/test/schemas/src/rebuild.py b/test/schemas/src/rebuild.py new file mode 100644 index 0000000..7966ea5 --- /dev/null +++ b/test/schemas/src/rebuild.py @@ -0,0 +1,141 @@ +"""Utility to generate some complex patterns.""" +import copy +import json +import keyword +import sys +from typing import Any + +play_keywords = list( + filter( + None, + """\ +any_errors_fatal +become +become_exe +become_flags +become_method +become_user +check_mode +collections +connection +debugger +diff +environment +fact_path 
+force_handlers +gather_facts +gather_subset +gather_timeout +handlers +hosts +ignore_errors +ignore_unreachable +max_fail_percentage +module_defaults +name +no_log +order +port +post_tasks +pre_tasks +remote_user +roles +run_once +serial +strategy +tags +tasks +throttle +timeout +vars +vars_files +vars_prompt +""".split(), + ) +) + + +def is_ref_used(obj: Any, ref: str) -> bool: + """Return a reference use from a schema.""" + ref_use = f"#/$defs/{ref}" + if isinstance(obj, dict): + if obj.get("$ref", None) == ref_use: + return True + for _ in obj.values(): + if isinstance(_, (dict, list)): + if is_ref_used(_, ref): + return True + elif isinstance(obj, list): + for _ in obj: + if isinstance(_, (dict, list)): + if is_ref_used(_, ref): + return True + return False + + +if __name__ == "__main__": + invalid_var_names = sorted(list(keyword.kwlist) + play_keywords) + if "__peg_parser__" in invalid_var_names: + invalid_var_names.remove("__peg_parser__") + # flake8: noqa: T201 + print("Updating invalid var names") + + with open("f/vars.json", "r+", encoding="utf-8") as f: + vars_schema = json.load(f) + vars_schema["anyOf"][0]["patternProperties"] = { + f"^(?!({'|'.join(invalid_var_names)})$)[a-zA-Z_][\\w]*$": {} + } + f.seek(0) + json.dump(vars_schema, f, indent=2) + f.write("\n") + f.truncate() + + # flake8: noqa: T201 + print("Compiling subschemas...") + with open("f/ansible.json", encoding="utf-8") as f: + combined_json = json.load(f) + + for subschema in ["tasks", "playbook"]: + sub_json = copy.deepcopy(combined_json) + # remove unsafe keys from root + for key in [ + "$id", + "id", + "title", + "description", + "type", + "default", + "items", + "properties", + "additionalProperties", + "examples", + ]: + if key in sub_json: + del sub_json[key] + for key in sub_json: + if key not in ["$schema", "$defs"]: + print(f"Unexpected key found at combined schema root: ${key}") + sys.exit(2) + # Copy keys from subschema to root + for key, value in combined_json["$defs"][subschema].items(): + sub_json[key] = value + sub_json["$comment"] = "Generated from ansible.json, do not edit." 
+ sub_json[ + "$id" + ] = f"https://raw.githubusercontent.com/ansible-lint/main/src/ansiblelint/schemas/{subschema}.json" + + # Remove all unreferenced ($ref) definitions ($defs) recursively + while True: + spare = [] + for k in sub_json["$defs"].keys(): + if not is_ref_used(sub_json, k): + spare.append(k) + for k in spare: + print(f"{subschema}: deleting unused '{k}' definition") + del sub_json["$defs"][k] + if not spare: + break + + with open(f"f/{subschema}.json", "w", encoding="utf-8") as f: + json.dump(sub_json, f, indent=2, sort_keys=True) + f.write("\n") diff --git a/test/schemas/src/schema.spec.ts b/test/schemas/src/schema.spec.ts new file mode 100644 index 0000000..774bae6 --- /dev/null +++ b/test/schemas/src/schema.spec.ts @@ -0,0 +1,184 @@ +import * as path from "path"; +import Ajv from "ajv"; +import fs from "fs"; +import minimatch from "minimatch"; +import yaml from "js-yaml"; +import { assert } from "chai"; +import stringify from "safe-stable-stringify"; +import { integer } from "vscode-languageserver-types"; +import { exec } from "child_process"; +const spawnSync = require("child_process").spawnSync; + +function ansiRegex({ onlyFirst = false } = {}) { + const pattern = [ + "[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)", + "(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))", + ].join("|"); + + return new RegExp(pattern, onlyFirst ? undefined : "g"); +} + +function stripAnsi(data: string) { + if (typeof data !== "string") { + throw new TypeError( + `Expected a \`string\`, got \`${typeof data}\ = ${data}` + ); + } + return data.replace(ansiRegex(), ""); +} + +const ajv = new Ajv({ + strictTypes: false, + strict: false, + inlineRefs: true, // https://github.com/ajv-validator/ajv/issues/1581#issuecomment-832211568 + allErrors: true, // https://github.com/ajv-validator/ajv/issues/1581#issuecomment-832211568 +}); + +// load whitelist of all test file subjects schemas can reference +const test_files = getAllFiles("./test"); +const negative_test_files = getAllFiles("./negative_test"); + +// load all schemas +const schema_files = fs + .readdirSync("f/") + .filter((el) => path.extname(el) === ".json"); +console.log(`Schemas: ${schema_files}`); + +describe("schemas under f/", function () { + schema_files.forEach((schema_file) => { + if ( + schema_file.startsWith("_") || + schema_file == "ansible-navigator-config.json" + ) { + return; + } + const schema_json = JSON.parse(fs.readFileSync(`f/${schema_file}`, "utf8")); + ajv.addSchema(schema_json); + const validator = ajv.compile(schema_json); + if (schema_json.examples == undefined) { + console.error( + `Schema file ${schema_file} is missing an examples key that we need for documenting file matching patterns.` + ); + return process.exit(1); + } + describe(schema_file, function () { + getTestFiles(schema_json.examples).forEach( + ({ file: test_file, expect_fail }) => { + it(`linting ${test_file} using ${schema_file}`, function () { + var errors_md = ""; + const result = validator( + yaml.load(fs.readFileSync(test_file, "utf8")) + ); + if (validator.errors) { + errors_md += "# ajv errors\n\n```json\n"; + errors_md += stringify(validator.errors, null, 2); + errors_md += "\n```\n\n"; + } + // validate using check-jsonschema (python-jsonschema): + // const py = exec(); + // Do not use python -m ... calling notation because for some + // reason, nodejs environment lacks some env variables needed + // and breaks usage from inside virtualenvs. 
+ const proc = spawnSync( + `${process.env.VIRTUAL_ENV}/bin/check-jsonschema -v -o json --schemafile f/${schema_file} ${test_file}`, + { shell: true, encoding: "utf-8", stdio: "pipe" } + ); + if (proc.status != 0) { + // real errors are sent to stderr due to https://github.com/python-jsonschema/check-jsonschema/issues/88 + errors_md += "# check-jsonschema\n\nstdout:\n\n```json\n"; + errors_md += stripAnsi(proc.output[1]); + errors_md += "```\n"; + if (proc.output[2]) { + errors_md += "\nstderr:\n\n```\n"; + errors_md += stripAnsi(proc.output[2]); + errors_md += "```\n"; + } + } + + // dump errors to markdown file for manual inspection + const md_filename = `${test_file}.md`; + if (errors_md) { + fs.writeFileSync(md_filename, errors_md); + } else { + // if no error occurs, we should ensure there is no md file present + fs.unlink(md_filename, function (err) { + if (err && err.code != "ENOENT") { + console.error(`Failed to remove ${md_filename}.`); + } + }); + } + assert.equal( + result, + !expect_fail, + `${JSON.stringify(validator.errors)}` + ); + }); + } + ); + // All /$defs/ that have examples property are assumed to be + // subschemas, "tasks" being the primary such case, which is also used + // for validating separated files. + for (var definition in schema_json["$defs"]) { + if (schema_json["$defs"][definition].examples) { + const subschema_uri = `${schema_json["$id"]}#/$defs/${definition}`; + const subschema_validator = ajv.getSchema(subschema_uri); + if (!subschema_validator) { + console.error(`Failed to load subschema ${subschema_uri}`); + return process.exit(1); + } + getTestFiles(schema_json["$defs"][definition].examples).forEach( + ({ file: test_file, expect_fail }) => { + it(`linting ${test_file} using ${subschema_uri}`, function () { + const result = subschema_validator( + yaml.load(fs.readFileSync(test_file, "utf8")) + ); + assert.equal( + result, + !expect_fail, + `${JSON.stringify(validator.errors)}` + ); + }); + } + ); + } + } + }); + }); +}); + +// find all tests for each schema file +function getTestFiles( + globs: string[] +): { file: string; expect_fail: boolean }[] { + const files = Array.from( + new Set( + globs + .map((glob: any) => minimatch.match(test_files, path.join("**", glob))) + .flat() + ) + ); + const negative_files = Array.from( + new Set( + globs + .map((glob: any) => + minimatch.match(negative_test_files, path.join("**", glob)) + ) + .flat() + ) + ); + + // All fails ending with fail, like `foo.fail.yml` are expected to fail validation + let result = files.map((f) => ({ file: f, expect_fail: false })); + result = result.concat( + negative_files.map((f) => ({ file: f, expect_fail: true })) + ); + return result; +} + +function getAllFiles(dir: string): string[] { + return fs.readdirSync(dir).reduce((files: string[], file: string) => { + const name = path.join(dir, file); + const isDirectory = fs.statSync(name).isDirectory(); + return isDirectory ? 
[...files, ...getAllFiles(name)] : [...files, name]; + }, []); +} diff --git a/test/schemas/test/ansible-navigator.yml b/test/schemas/test/ansible-navigator.yml new file mode 100644 index 0000000..e627b78 --- /dev/null +++ b/test/schemas/test/ansible-navigator.yml @@ -0,0 +1,85 @@ +--- +ansible-navigator: + ansible: + config: /tmp/ansible.cfg + cmdline: "--forks 15" + inventories: + - /tmp/test_inventory.yml + playbook: /tmp/test_playbook.yml + + ansible-builder: + workdir: /tmp/ + + ansible-runner: + artifact-dir: /tmp/test1 + rotate-artifacts-count: 10 + timeout: 300 + + app: run + + collection-doc-cache-path: /tmp/cache.db + + color: + enable: False + osc4: False + + documentation: + plugin: + name: shell + type: become + + editor: + command: vim_from_setting + console: False + + exec: + shell: False + command: /bin/foo + + execution-environment: + container-engine: podman + enabled: False + environment-variables: + pass: + - ONE + - TWO + - THREE + set: + KEY1: VALUE1 + KEY2: VALUE2 + KEY3: VALUE3 + image: test_image:latest + pull-policy: never + volume-mounts: + - src: "/test1" + dest: "/test1" + label: "Z" + container-options: + - "--net=host" + + help-builder: False + + help-config: True + + help-doc: True + + help-inventory: True + + help-playbook: False + + inventory-columns: + - ansible_network_os + - ansible_network_cli_ssh_type + - ansible_connection + + logging: + level: critical + append: False + file: /tmp/log.txt + + mode: stdout + + playbook-artifact: + enable: True + replay: /tmp/test_artifact.json + save-as: /tmp/test_artifact.json diff --git a/test/schemas/test/changelog.yml b/test/schemas/test/changelog.yml new file mode 100644 index 0000000..99bcb2f --- /dev/null +++ b/test/schemas/test/changelog.yml @@ -0,0 +1,47 @@ +ancestor: 0.5.4 +releases: + 1.0.0-alpha: + release_date: "2020-01-01" + codename: "The first public one" + changes: + release_summary: A bit o markdown text + major_changes: + - Free form text mentioning a major change + minor_changes: + - Free form text mentioning a minor change + breaking_changes: + - Free form text mentioning a breaking change + deprecated_features: + - A list of strings describing features deprecated in this release + removed_features: + - A list of strings describing features removed in this release + security_fixes: + - A list of strings describing security-relevant bugfixes + bugfixes: + - Fixed bug `#1 <https://example.com>` + known_issues: + - A list of strings describing known issues that are currently not fixed or will not be fixed + trivial: + - A list of strings describing changes that are too trivial to show in the changelog + modules: + - name: short_module_name + description: foo + namespace: foo + plugins: + lookup: + - name: reverse + description: Reverse magic + namespace: null + inventory: + - name: docker + description: Inventory plugin for docker containers + namespace: null + objects: + role: + - name: install_reqs + description: Install all requirements of this collection + namespace: null + playbook: + - name: wipe_personal_data + description: Wipes all personal data from the database + namespace: null diff --git a/test/schemas/test/changelogs/maximal/changelog.yaml b/test/schemas/test/changelogs/maximal/changelog.yaml new file mode 100644 index 0000000..e21b349 --- /dev/null +++ b/test/schemas/test/changelogs/maximal/changelog.yaml @@ -0,0 +1,59 @@ +--- +# Example of minimal changelogs/changelog.yaml that is considered valid +ancestor: null + +releases: + 1.0.0-alpha: + release_date: "1980-01-01" + codename: 
foo + fragments: [] + changes: + release_summary: This is the initial White Rabbit release. Enjoy! + major_changes: + - The authentication method handling has been rewritten. + minor_changes: + - foo - Module can now reformat hard disks without asking. + - bob lookup - Makes sure Bob isn't there multiple times. + breaking_changes: + - Due to the security bug in the post module, the module no longer accepts the password + option. Please stop using the option and change any password you ever supplied to the + module. + deprecated_features: + - foo - The bar option has been deprecated. Use the username option instead. + - send_request - The quick option has been deprecated. Use the protocol option instead. + removed_features: + - foo - The baz option has been removed. It has never been used anyway. + security_fixes: + - post - The module accidentally sent your password in plaintext to all servers it could find. + bugfixes: + - post - The module made PUT requests instead of POST requests. + - get - The module will no longer crash if it received invalid JSON data + trivial: + - something that is not included in release notes + known_issues: + - som other + modules: + - name: head + description: Make a HEAD request + namespace: "net_tools.rest" + - name: echo + description: Echo params + namespace: "" + plugins: + lookup: + - name: reverse + description: Reverse magic + namespace: null + inventory: + - name: docker + description: Inventory plugin for docker containers + namespace: null + objects: + role: + - name: install_reqs + description: Install all requirements of this collection + namespace: null + playbook: + - name: wipe_personal_data + description: Wipes all personal data from the database + namespace: null diff --git a/test/schemas/test/changelogs/minimal/changelog.yaml b/test/schemas/test/changelogs/minimal/changelog.yaml new file mode 100644 index 0000000..d1618f0 --- /dev/null +++ b/test/schemas/test/changelogs/minimal/changelog.yaml @@ -0,0 +1,3 @@ +--- +# Example of minimal changelogs/changelog.yaml that is considered valid +releases: {} diff --git a/test/schemas/test/execution-environment.yml b/test/schemas/test/execution-environment.yml new file mode 100644 index 0000000..e447a9a --- /dev/null +++ b/test/schemas/test/execution-environment.yml @@ -0,0 +1,21 @@ +--- +# Example from https://docs.ansible.com/automation-controller/latest/html/userguide/ee_reference.html +version: 1 + +build_arg_defaults: + EE_BASE_IMAGE: "quay.io/ansible/ansible-runner:stable-2.10-devel" + +ansible_config: "ansible.cfg" + +dependencies: + galaxy: requirements.yml + python: requirements.txt + system: bindep.txt + +additional_build_steps: + prepend: | + RUN whoami + RUN cat /etc/os-release + append: + - RUN echo This is a post-install command! + - RUN ls -la /etc diff --git a/test/schemas/test/galaxy.yml b/test/schemas/test/galaxy.yml new file mode 100644 index 0000000..1c77216 --- /dev/null +++ b/test/schemas/test/galaxy.yml @@ -0,0 +1,13 @@ +name: foo +namespace: bar +version: 1.2.3 +authors: + - John +readme: ../README.md +description: ... 
+dependencies: + "other_namespace.collection1": ">=1.0.0" + "other_namespace.collection2": ">=2.0.0,<3.0.0" + "anderson55.my_collection": "*" # note: "*" selects the highest version available +# upload to galaxy will fail if a repository key is not present +repository: https://www.github.com/my_org/my_collection diff --git a/test/schemas/test/inventory.yml b/test/schemas/test/inventory.yml new file mode 100644 index 0000000..48a0e6a --- /dev/null +++ b/test/schemas/test/inventory.yml @@ -0,0 +1,13 @@ +all: + hosts: + mail.example.com: + children: + webservers: + hosts: + foo.example.com: + bar[01:50:2].example.com: + dbservers: + hosts: + one.example.com: + two.example.com: + three.example.com: diff --git a/test/schemas/test/inventory/inventory.yml b/test/schemas/test/inventory/inventory.yml new file mode 100644 index 0000000..8752d9b --- /dev/null +++ b/test/schemas/test/inventory/inventory.yml @@ -0,0 +1,31 @@ +--- +# https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html +ungrouped: {} +all: + hosts: + mail.example.com: + children: + webservers: + hosts: + foo.example.com: + bar.example.com: + dbservers: + hosts: + one.example.com: + two.example.com: + three.example.com: + east: + hosts: + foo.example.com: + one.example.com: + two.example.com: + west: + hosts: + bar.example.com: + three.example.com: + prod: + children: + east: {} + test: + children: + west: {} diff --git a/test/schemas/test/inventory/production.yml b/test/schemas/test/inventory/production.yml new file mode 100644 index 0000000..6350bda --- /dev/null +++ b/test/schemas/test/inventory/production.yml @@ -0,0 +1,37 @@ +all: + hosts: + mail.example.com: + children: + webservers: + hosts: + foo.example.com: + bar.example.com: + # ranges are supported: + www[01:50].example.com: + www[01:50:2].example.com: + # these are variables: + var_1: value_1 + another_var: 200 + dbservers: + hosts: + one.example.com: + two.example.com: + three.example.com: + east: + hosts: + foo.example.com: + one.example.com: + two.example.com: + west: + hosts: + bar.example.com: + three.example.com: + prod: + children: + east: + test: + children: + west: + # add variables for all hosts + vars: + my_var: 123 diff --git a/test/schemas/test/meta/requirements.yml b/test/schemas/test/meta/requirements.yml new file mode 100644 index 0000000..6b07e4f --- /dev/null +++ b/test/schemas/test/meta/requirements.yml @@ -0,0 +1,3 @@ +# requirements v2 +collections: [] +roles: [] diff --git a/test/schemas/test/molecule/cluster/base.yml b/test/schemas/test/molecule/cluster/base.yml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/schemas/test/molecule/cluster/base.yml diff --git a/test/schemas/test/molecule/cluster/converge.yml b/test/schemas/test/molecule/cluster/converge.yml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/schemas/test/molecule/cluster/converge.yml diff --git a/test/schemas/test/molecule/cluster/foobar.yml b/test/schemas/test/molecule/cluster/foobar.yml new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/schemas/test/molecule/cluster/foobar.yml diff --git a/test/schemas/test/molecule/cluster/molecule.yml b/test/schemas/test/molecule/cluster/molecule.yml new file mode 100644 index 0000000..bf40d14 --- /dev/null +++ b/test/schemas/test/molecule/cluster/molecule.yml @@ -0,0 +1,77 @@ +--- +dependency: + name: galaxy + +driver: + name: docker + +lint: | + set -e + yamllint -c molecule/yaml-lint.yml . 
+ ansible-lint + flake8 + +platforms: + - name: instance-1 + image: "geerlingguy/docker-${MOLECULE_DISTRO:-centos7}-ansible:latest" + command: ${MOLECULE_DOCKER_COMMAND:-""} + volumes: + - /sys/fs/cgroup:/sys/fs/cgroup:ro + privileged: true + pre_build_image: true + groups: + - zookeeper + env: + - Hello: world! + + - name: instance-2 + image: "geerlingguy/docker-${MOLECULE_DISTRO:-centos7}-ansible:latest" + command: ${MOLECULE_DOCKER_COMMAND:-""} + volumes: + - /sys/fs/cgroup:/sys/fs/cgroup:ro + privileged: true + pre_build_image: true + groups: + - zookeeper + env: + - Hello: world! + + - name: instance-3 + image: "geerlingguy/docker-${MOLECULE_DISTRO:-centos7}-ansible:latest" + command: ${MOLECULE_DOCKER_COMMAND:-""} + volumes: + - /sys/fs/cgroup:/sys/fs/cgroup:ro + privileged: true + pre_build_image: true + groups: + - zookeeper + env: + - Hello: world! + +provisioner: + name: ansible + log: false + playbooks: + converge: ${MOLECULE_PLAYBOOK:-converge.yml} + inventory: + host_vars: + instance-1: + zookeeper_id: 0 + instance-2: + zookeeper_id: 1 + instance-3: + zookeeper_id: 2 + +scenario: + name: cluster + test_sequence: + - destroy + - create + - prepare + - converge + - check + - verify + - destroy + +verifier: + name: ansible diff --git a/test/schemas/test/molecule/default/molecule.yml b/test/schemas/test/molecule/default/molecule.yml new file mode 100644 index 0000000..b3e290d --- /dev/null +++ b/test/schemas/test/molecule/default/molecule.yml @@ -0,0 +1,118 @@ +--- +dependency: + name: shell + enabled: true + command: path/to/command --flag1 subcommand --flag2 + options: + ignore-certs: true + ignore-errors: true + env: + FOO: bar + +lint: | + set -e + yamllint . + ansible-lint + flake8 + +driver: + name: podman + options: + managed: false + login_cmd_template: ... 
+ ansible_connection_options: + ansible_connection: ssh + # vagrant options: + provider: + name: virtualbox + +log: true + +platforms: + - name: ubi8 + hostname: ubi8 + children: [] # list of strings + unknown_property_foo: bar # unknown properties should be allowed for drivers + groups: + - ubi8 + image: ubi8/ubi-init + pre_build_image: true + registry: + url: registry.access.redhat.com + dockerfile: Dockerfile + pkg_extras: python*setuptools + volumes: + - /etc/ci/mirror_info.sh:/etc/ci/mirror_info.sh:ro + - /etc/pki/rpm-gpg:/etc/pki/rpm-gpg + privileged: true + environment: &env + http_proxy: "{{ lookup('env', 'http_proxy') }}" + https_proxy: "{{ lookup('env', 'https_proxy') }}" + ulimits: &ulimit + - host + # vagrant ones + box: foo/bar + memory: 1024 + cpus: 2 + provider_raw_config_args: [] + networks: # used by docker/podman + - name: foo + + - name: ubi7 + hostname: ubi7 + children: ["ubi8"] + groups: + - ubi7 + image: ubi7/ubi-init + registry: + url: registry.access.redhat.com + command: /sbin/init + tmpfs: + - /run + - /tmp + volumes: + - /etc/ci/mirror_info.sh:/etc/ci/mirror_info.sh:ro + - /etc/pki/rpm-gpg:/etc/pki/rpm-gpg + - /sys/fs/cgroup:/sys/fs/cgroup:ro + network_mode: service:vpn + privileged: true + environment: &env + http_proxy: "{{ lookup('env', 'http_proxy') }}" + https_proxy: "{{ lookup('env', 'https_proxy') }}" + ulimits: &ulimit + - host + +provisioner: + playbooks: + prepare: prepare.yml + inventory: + hosts: + all: + hosts: + ubi8: + ansible_python_interpreter: /usr/bin/python3 + ubi7: + selinux: permissive + ubi8: + selinux: enforced + name: ansible + log: true + env: + ANSIBLE_STDOUT_CALLBACK: yaml + config_options: + defaults: + fact_caching: jsonfile + fact_caching_connection: /tmp/molecule/facts + +scenario: + test_sequence: + - destroy + - create + - prepare + - converge + - check + - verify + - destroy + +verifier: + name: testinfra diff --git a/test/schemas/test/molecule/vagrant/molecule.yml b/test/schemas/test/molecule/vagrant/molecule.yml new file mode 100644 index 0000000..5630df6 --- /dev/null +++ b/test/schemas/test/molecule/vagrant/molecule.yml @@ -0,0 +1,47 @@ +--- +dependency: + name: shell + enabled: false + +lint: | + set -e + yamllint . 
+ ansible-lint + flake8 + +driver: + name: vagrant + provider: + name: libvirt + provision: false + cachier: machine + parallel: true + default_box: "generic/alpine310" +platforms: + - name: instance + hostname: foo.bar.com + interfaces: + - auto_config: true + network_name: private_network + type: dhcp + instance_raw_config_args: + - 'vm.synced_folder ".", "/vagrant", type: "rsync"' + - 'vm.provision :shell, inline: "uname"' + config_options: + ssh.keep_alive: true + ssh.remote_user: "vagrant" + synced_folder: true + box: fedora/32-cloud-base + box_version: 32.20200422.0 + box_url: "http://127.0.0.1/box.img" + memory: 512 + cpus: 1 + provider_options: + video_type: "vga" + provider_raw_config_args: + - cpuset = '1-4,^3,6' + - name: instance2 + hostname: false + +provisioner: + name: ansible diff --git a/test/schemas/test/playbooks/block.yml b/test/schemas/test/playbooks/block.yml new file mode 100644 index 0000000..631242b --- /dev/null +++ b/test/schemas/test/playbooks/block.yml @@ -0,0 +1,10 @@ +- hosts: localhost + tasks: + - debug: + msg: task under no block + - block: + - debug: + msg: task under one level of block + - block: + - debug: + msg: task under two levels of block diff --git a/test/schemas/test/playbooks/defaults/foo.yml b/test/schemas/test/playbooks/defaults/foo.yml new file mode 100644 index 0000000..47d9438 --- /dev/null +++ b/test/schemas/test/playbooks/defaults/foo.yml @@ -0,0 +1,3 @@ +# defaults have same format as vars +in_is_reserved: ... +ss: ss diff --git a/test/schemas/test/playbooks/environment.yml b/test/schemas/test/playbooks/environment.yml new file mode 100644 index 0000000..d25fd1b --- /dev/null +++ b/test/schemas/test/playbooks/environment.yml @@ -0,0 +1,7 @@ +--- +- hosts: localhost + environment: # <- valid + FOO: BAR + +- hosts: localhost + environment: "{{ foo }}" # <- valid diff --git a/test/schemas/test/playbooks/failed_when.yml b/test/schemas/test/playbooks/failed_when.yml new file mode 100644 index 0000000..14c942a --- /dev/null +++ b/test/schemas/test/playbooks/failed_when.yml @@ -0,0 +1,18 @@ +- hosts: localhost + tasks: + - name: foo + ansible.builtin.debug: + msg: foo! + failed_when: false # <- valid + + - name: foo + ansible.builtin.debug: + msg: foo! + failed_when: "string is valid too" # <- valid + + - name: foo + ansible.builtin.debug: + msg: foo! 
+ failed_when: # <- lists are valid too + - foo + - bar diff --git a/test/schemas/test/playbooks/full-jinja.yml b/test/schemas/test/playbooks/full-jinja.yml new file mode 100644 index 0000000..22eaafe --- /dev/null +++ b/test/schemas/test/playbooks/full-jinja.yml @@ -0,0 +1,16 @@ +--- +- name: Test that schema allows multiline-jinja + hosts: localhost + # https://github.com/ansible/ansible-lint/issues/2772 + become: >- + {{ + true + }} + tasks: + - name: Test more complex jinja is also allowed + ansible.builtin.debug: + msg: "{{ item }}" + # that below is valid and show be allowed: + with_items: >- + {%- set ns = [1, 1, 2] -%} + {{- ns | unique -}} diff --git a/test/schemas/test/playbooks/gather_facts.yml b/test/schemas/test/playbooks/gather_facts.yml new file mode 100644 index 0000000..598188d --- /dev/null +++ b/test/schemas/test/playbooks/gather_facts.yml @@ -0,0 +1,6 @@ +--- +- hosts: localhost + gather_facts: false + tasks: + - ansible.builtin.debug: + msg: foo diff --git a/test/schemas/test/playbooks/gather_subset.yml b/test/schemas/test/playbooks/gather_subset.yml new file mode 100644 index 0000000..de0e689 --- /dev/null +++ b/test/schemas/test/playbooks/gather_subset.yml @@ -0,0 +1,15 @@ +--- +- hosts: localhost + gather_subset: + - all + - "!network" + tasks: + - ansible.builtin.debug: + msg: foo + +- hosts: localhost + gather_subset: + - all + tasks: + - ansible.builtin.debug: + msg: bar diff --git a/test/schemas/test/playbooks/ignore_errors..yml b/test/schemas/test/playbooks/ignore_errors..yml new file mode 100644 index 0000000..6c92046 --- /dev/null +++ b/test/schemas/test/playbooks/ignore_errors..yml @@ -0,0 +1,9 @@ +- hosts: localhost + tasks: + - command: echo 123 + ignore_errors: true + + - command: echo 123 + vars: + should_ignore_errors: true + ignore_errors: "{{ should_ignore_errors }}" diff --git a/test/schemas/test/playbooks/import_playbook.yml b/test/schemas/test/playbooks/import_playbook.yml new file mode 100644 index 0000000..efd8787 --- /dev/null +++ b/test/schemas/test/playbooks/import_playbook.yml @@ -0,0 +1,9 @@ +- ansible.builtin.import_playbook: other.yml + +- import_playbook: other.yml + tags: + - foo + +- import_playbook: other.yml + when: + - foo is true diff --git a/test/schemas/test/playbooks/included.yml b/test/schemas/test/playbooks/included.yml new file mode 100644 index 0000000..468a17c --- /dev/null +++ b/test/schemas/test/playbooks/included.yml @@ -0,0 +1 @@ +- hosts: localhost diff --git a/test/schemas/test/playbooks/integers.yml b/test/schemas/test/playbooks/integers.yml new file mode 100644 index 0000000..861acee --- /dev/null +++ b/test/schemas/test/playbooks/integers.yml @@ -0,0 +1,23 @@ +--- +- hosts: localhost + vars: + some: 0 + gather_timeout: "{{ some }}" + tasks: + - ansible.builtin.debug: + msg: "{{ item }}" + async: 0 + poll: 0 + delay: 0 + timeout: 0 + port: 0 + - ansible.builtin.debug: + msg: "{{ item }}" + async: "{{ some }}" + poll: "{{ some }}" + delay: "{{ some }}" + timeout: "{{ some }}" + port: "{{ some }}" + +- hosts: localhost + gather_timeout: 0 diff --git a/test/schemas/test/playbooks/local_action_dict.yml b/test/schemas/test/playbooks/local_action_dict.yml new file mode 100644 index 0000000..05b3129 --- /dev/null +++ b/test/schemas/test/playbooks/local_action_dict.yml @@ -0,0 +1,5 @@ +- hosts: localhost + tasks: + - local_action: + module: ansible.builtin.debug + msg: hello diff --git a/test/schemas/test/playbooks/local_action_string.yml b/test/schemas/test/playbooks/local_action_string.yml new file mode 100644 index 
0000000..e7dacc4 --- /dev/null +++ b/test/schemas/test/playbooks/local_action_string.yml @@ -0,0 +1,3 @@ +- hosts: localhost + tasks: + - local_action: "ansible.builtin.debug msg=hello" diff --git a/test/schemas/test/playbooks/loop.yml b/test/schemas/test/playbooks/loop.yml new file mode 100644 index 0000000..c0e1734 --- /dev/null +++ b/test/schemas/test/playbooks/loop.yml @@ -0,0 +1,9 @@ +--- +- hosts: localhost + tasks: + - name: that should pass + ansible.builtin.debug: + var: item + loop: + - foo + - bar diff --git a/test/schemas/test/playbooks/no_log.yml b/test/schemas/test/playbooks/no_log.yml new file mode 100644 index 0000000..e1944dd --- /dev/null +++ b/test/schemas/test/playbooks/no_log.yml @@ -0,0 +1,11 @@ +- hosts: localhost + vars: + some_var: true + tasks: + - ansible.builtin.debug: + msg: foo + no_log: true + + - ansible.builtin.debug: + msg: foo + no_log: "{{ some_var }}" diff --git a/test/schemas/test/playbooks/roles.yml b/test/schemas/test/playbooks/roles.yml new file mode 100644 index 0000000..a996ce0 --- /dev/null +++ b/test/schemas/test/playbooks/roles.yml @@ -0,0 +1,13 @@ +- hosts: localhost + roles: [] + +- hosts: localhost + roles: + - foo + - role: "path/to/role" + vars: + FOO: bar + tags: + - foo + - role: bar + tags: string_tag diff --git a/test/schemas/test/playbooks/run.yml b/test/schemas/test/playbooks/run.yml new file mode 100644 index 0000000..52e7001 --- /dev/null +++ b/test/schemas/test/playbooks/run.yml @@ -0,0 +1,42 @@ +- name: foo + ansible.builtin.import_playbook: included.yml + +- hosts: # to check if lists are allowed: + - localhost + - webservers + # validate serial allows strings like percentage value + serial: 10% + handlers: + - name: handler 1 + ansible.builtin.debug: + msg: "I am handler 1" + listen: "always handler" + + - name: handler 2 + ansible.builtin.debug: + msg: "I am handler 2" + listen: # to check if lists are allowed: + - "list listening handler" + - "other listening topic" + +- hosts: localhost + serial: 1 # validate serial allows integer + +- hosts: localhost + serial: "{{ 1 }}" # jinja also ok + +- hosts: localhost + serial: # validate serial allows these too: + - 123 + - 10% + - "{{ some }}" # jinja also ok + +- hosts: localhost + tasks: + - debug: + msg: "failed_when should accept booleans" + failed_when: false + + - debug: + msg: "failed_when should allow strings" + failed_when: "'foo' in 'foobar'" diff --git a/test/schemas/test/playbooks/run_once.yml b/test/schemas/test/playbooks/run_once.yml new file mode 100644 index 0000000..be36c8e --- /dev/null +++ b/test/schemas/test/playbooks/run_once.yml @@ -0,0 +1,6 @@ +- hosts: localhost + tasks: + - name: foo2 + ansible.builtin.debug: + msg: foo! + run_once: "{{ true }}" # valid diff --git a/test/schemas/test/playbooks/tags.yml b/test/schemas/test/playbooks/tags.yml new file mode 100644 index 0000000..b758257 --- /dev/null +++ b/test/schemas/test/playbooks/tags.yml @@ -0,0 +1,23 @@ +- hosts: localhost + roles: + - role: foo + tags: foo # <-- allowed + - role: foo + tags: # <-- allowed + - foo + - bar + tags: # <-- allowed + - foo + - bar + tasks: + - ansible.builtin.debug: + msg: "..." + tags: # <-- allowed + - foo + - bar + - ansible.builtin.debug: + msg: "..." 
+ tags: # <-- allowed + - foo +- hosts: localhost + tags: foo # <-- allowed diff --git a/test/schemas/test/playbooks/tasks.yml b/test/schemas/test/playbooks/tasks.yml new file mode 100644 index 0000000..b01cf8c --- /dev/null +++ b/test/schemas/test/playbooks/tasks.yml @@ -0,0 +1,5 @@ +- hosts: localhost + pre_tasks: [] + post_tasks: [] + tasks: [] + handlers: [] diff --git a/test/schemas/test/playbooks/tasks/args.yml b/test/schemas/test/playbooks/tasks/args.yml new file mode 100644 index 0000000..1e25e1d --- /dev/null +++ b/test/schemas/test/playbooks/tasks/args.yml @@ -0,0 +1,4 @@ +- action: foo + args: {} +- action: foo + args: "{{ {} }}" diff --git a/test/schemas/test/playbooks/tasks/become_method.yml b/test/schemas/test/playbooks/tasks/become_method.yml new file mode 100644 index 0000000..9d63a76 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/become_method.yml @@ -0,0 +1,7 @@ +- command: echo 123 + become_method: sudo + +- command: echo 123 + vars: + sudo_var: doo + become_method: "{{ sudo_var }}" # templating is ok diff --git a/test/schemas/test/playbooks/tasks/changed_when.yml b/test/schemas/test/playbooks/tasks/changed_when.yml new file mode 100644 index 0000000..7887ac7 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/changed_when.yml @@ -0,0 +1,10 @@ +- command: echo 123 + changed_when: false + +- command: echo 123 + changed_when: '"1" in ["1", "2", "3"]' + +- command: echo 123 + changed_when: # valid, all items must evaluate as true (AND) + - "foo is defined" + - '"1" in ["1", "2", "3"]' diff --git a/test/schemas/test/playbooks/tasks/diff.yml b/test/schemas/test/playbooks/tasks/diff.yml new file mode 100644 index 0000000..cc0bebc --- /dev/null +++ b/test/schemas/test/playbooks/tasks/diff.yml @@ -0,0 +1,4 @@ +- action: foo + diff: true +- action: foo + diff: "{{ true }}" diff --git a/test/schemas/test/playbooks/tasks/empty_tasks.yml b/test/schemas/test/playbooks/tasks/empty_tasks.yml new file mode 100644 index 0000000..7ee1211 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/empty_tasks.yml @@ -0,0 +1,2 @@ +--- +# this is a valid tasks file, loaded as 'null' document. 
diff --git a/test/schemas/test/playbooks/tasks/ignore_errors.yml b/test/schemas/test/playbooks/tasks/ignore_errors.yml new file mode 100644 index 0000000..2f253f2 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/ignore_errors.yml @@ -0,0 +1,7 @@ +- command: echo 123 + ignore_errors: true + +- command: echo 123 + vars: + should_ignore_errors: true + ignore_errors: "{{ should_ignore_errors }}" diff --git a/test/schemas/test/playbooks/tasks/local_action_dict.yml b/test/schemas/test/playbooks/tasks/local_action_dict.yml new file mode 100644 index 0000000..5351ab9 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/local_action_dict.yml @@ -0,0 +1,3 @@ +- local_action: + module: ansible.builtin.debug + msg: hello diff --git a/test/schemas/test/playbooks/tasks/local_action_string.yml b/test/schemas/test/playbooks/tasks/local_action_string.yml new file mode 100644 index 0000000..93d98e0 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/local_action_string.yml @@ -0,0 +1 @@ +- local_action: "ansible.builtin.debug msg=hello" diff --git a/test/schemas/test/playbooks/tasks/loop.yml b/test/schemas/test/playbooks/tasks/loop.yml new file mode 100644 index 0000000..33c6130 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/loop.yml @@ -0,0 +1,6 @@ +- name: that should pass + ansible.builtin.debug: + var: item + loop: + - foo + - bar diff --git a/test/schemas/test/playbooks/tasks/no_log.yml b/test/schemas/test/playbooks/tasks/no_log.yml new file mode 100644 index 0000000..83a12d0 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/no_log.yml @@ -0,0 +1,11 @@ +- ansible.builtin.debug: + msg: foo + no_log: true # valid + vars: + some_var: true + +- ansible.builtin.debug: + msg: foo + no_log: "{{ some_var }}" # valid too + vars: + some_var: true diff --git a/test/schemas/test/playbooks/tasks/notify.yml b/test/schemas/test/playbooks/tasks/notify.yml new file mode 100644 index 0000000..88432d9 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/notify.yml @@ -0,0 +1,11 @@ +- name: notify single handler + ansible.builtin.debug: + msg: task with single handler + notify: handler1 + +- name: notify multiple handlers + ansible.builtin.debug: + msg: task with multiple handlers + notify: + - handler1 + - handler2 diff --git a/test/schemas/test/playbooks/tasks/run_once.yml b/test/schemas/test/playbooks/tasks/run_once.yml new file mode 100644 index 0000000..0f3f6f7 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/run_once.yml @@ -0,0 +1,9 @@ +- name: foo + ansible.builtin.debug: + msg: foo! + run_once: true # valid + +- name: foo2 + ansible.builtin.debug: + msg: foo! 
+ run_once: "{{ true }}" # valid diff --git a/test/schemas/test/playbooks/tasks/some_tasks.yml b/test/schemas/test/playbooks/tasks/some_tasks.yml new file mode 100644 index 0000000..2430d52 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/some_tasks.yml @@ -0,0 +1,8 @@ +- name: foo + debug: + msg: bar + delegate_facts: true + +- block: + - debug: + msg: "block under one level of block" diff --git a/test/schemas/test/playbooks/tasks/tags.yml b/test/schemas/test/playbooks/tasks/tags.yml new file mode 100644 index 0000000..a0b7454 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/tags.yml @@ -0,0 +1,29 @@ +- command: echo 123 + tags: + - foo + - bar + +- command: echo 123 + tags: foo + +- block: + - command: echo 123 + tags: + - foo + - bar + + - command: echo 123 + tags: foo + tags: + - foo + - bar + +- block: + - command: echo 123 + tags: + - foo + - bar + + - command: echo 123 + tags: foo + tags: foo diff --git a/test/schemas/test/playbooks/tasks/templated_become.yml b/test/schemas/test/playbooks/tasks/templated_become.yml new file mode 100644 index 0000000..a8cfad3 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/templated_become.yml @@ -0,0 +1,12 @@ +- name: foo + ansible.builtin.debug: + msg: foo! + become: "{{ firewalld_become }}" # <- valid + +- name: foo block + become: "{{ firewalld_become }}" # <- valid + block: + - name: foo + ansible.builtin.debug: + msg: foo! + become: "{{ firewalld_become }}" # <- valid diff --git a/test/schemas/test/playbooks/tasks/templated_integers.yml b/test/schemas/test/playbooks/tasks/templated_integers.yml new file mode 100644 index 0000000..59c4530 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/templated_integers.yml @@ -0,0 +1,5 @@ +- debug: + msg: foo + retries: "{{ 2 }}" # <-- valid + port: "{{ 80 }}" # <-- valid + poll: "{{ 2 }}" # <-- valid diff --git a/test/schemas/test/playbooks/tasks/throttled.yml b/test/schemas/test/playbooks/tasks/throttled.yml new file mode 100644 index 0000000..e1be471 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/throttled.yml @@ -0,0 +1,5 @@ +- action: foo + throttle: 1 # valid + +- action: foo + throttle: "{{ 1 }}" # valid diff --git a/test/schemas/test/playbooks/tasks/until.yml b/test/schemas/test/playbooks/tasks/until.yml new file mode 100644 index 0000000..2146a9d --- /dev/null +++ b/test/schemas/test/playbooks/tasks/until.yml @@ -0,0 +1,14 @@ +- ansible.builtin.debug: + msg: "valid" + until: true + +- ansible.builtin.debug: + msg: "valid" + until: + - "foo not in bar" + +- ansible.builtin.debug: + msg: "valid" + until: + - "'1' in ['1', '2', '3']" + - "foo is not defined" diff --git a/test/schemas/test/playbooks/tasks/when.yml b/test/schemas/test/playbooks/tasks/when.yml new file mode 100644 index 0000000..7874329 --- /dev/null +++ b/test/schemas/test/playbooks/tasks/when.yml @@ -0,0 +1,10 @@ +- action: foo + when: true # valid + +- action: foo 2 + when: foo in bar # valid + +- action: foo 3 + when: # valid + - foo in bar + - apple is orange diff --git a/test/schemas/test/playbooks/tasks/with_items.yml b/test/schemas/test/playbooks/tasks/with_items.yml new file mode 100644 index 0000000..07c72aa --- /dev/null +++ b/test/schemas/test/playbooks/tasks/with_items.yml @@ -0,0 +1,16 @@ +- command: echo 123 + with_items: [] + +- command: echo 123 + with_items: + - 1 + - foo + - {} + - [] + +- command: echo 123 + vars: + my_list: + - 1 + - 2 + with_items: "{{ my_list }}" diff --git a/test/schemas/test/playbooks/templated_become.yml b/test/schemas/test/playbooks/templated_become.yml new file mode 
100644 index 0000000..518e46b --- /dev/null +++ b/test/schemas/test/playbooks/templated_become.yml @@ -0,0 +1,16 @@ +--- +- hosts: localhost + become: "{{ firewalld_become }}" # <- valid + tasks: + - name: foo + ansible.builtin.debug: + msg: foo! + become: "{{ firewalld_become }}" # <- valid + + - name: foo block + become: "{{ firewalld_become }}" # <- valid + block: + - name: foo + ansible.builtin.debug: + msg: foo! + become: "{{ firewalld_become }}" # <- valid diff --git a/test/schemas/test/playbooks/user_valid.yml b/test/schemas/test/playbooks/user_valid.yml new file mode 100644 index 0000000..bc6a5e6 --- /dev/null +++ b/test/schemas/test/playbooks/user_valid.yml @@ -0,0 +1,3 @@ +- hosts: localhost + user: foo # <-- allowed, alias to remote_user + tasks: [] diff --git a/test/schemas/test/playbooks/var_files.yml b/test/schemas/test/playbooks/var_files.yml new file mode 100644 index 0000000..eef44fd --- /dev/null +++ b/test/schemas/test/playbooks/var_files.yml @@ -0,0 +1,13 @@ +--- +- name: var_files should accept null + hosts: localhost + vars_files: null + +- name: var_files should accept string + hosts: localhost + vars_files: /dev/null + +- name: var_files should accept array[string] + hosts: localhost + vars_files: + - /dev/null diff --git a/test/schemas/test/playbooks/vars/empty_vars.yml b/test/schemas/test/playbooks/vars/empty_vars.yml new file mode 100644 index 0000000..a6e3ce7 --- /dev/null +++ b/test/schemas/test/playbooks/vars/empty_vars.yml @@ -0,0 +1,2 @@ +--- +# Ensure we allow empty var files, matching Ansible behavior diff --git a/test/schemas/test/playbooks/vars/encrypted.yml b/test/schemas/test/playbooks/vars/encrypted.yml new file mode 100644 index 0000000..7808fec --- /dev/null +++ b/test/schemas/test/playbooks/vars/encrypted.yml @@ -0,0 +1,6 @@ +$ANSIBLE_VAULT;1.2;AES256;dev +66373266323161346330626137613862653935343634366636353266323966363665636266363739 +6436363237626633653139636232663131613832336266310a323766643264306436306266663930 +66666238346132373766623932356530333165613835623863653837306130383065323138333034 +6265313861613761620a393663616265633637343534346533366437653839623239396366366330 +3165 diff --git a/test/schemas/test/playbooks/vars/myvars.yml b/test/schemas/test/playbooks/vars/myvars.yml new file mode 100644 index 0000000..8698380 --- /dev/null +++ b/test/schemas/test/playbooks/vars/myvars.yml @@ -0,0 +1,9 @@ +foo: bar +_foo: bar +foo_var_xxx: "{{ sss }}" +in_job: ... +nested: + pear: fruit + apple: fruit +sso_force_handlers: ... +force_handlers_foo: ... diff --git a/test/schemas/test/playbooks/vars_prompt.yml b/test/schemas/test/playbooks/vars_prompt.yml new file mode 100644 index 0000000..1bf65c3 --- /dev/null +++ b/test/schemas/test/playbooks/vars_prompt.yml @@ -0,0 +1,11 @@ +- name: Fixture + hosts: localhost + vars_prompt: + - name: username + prompt: What is your username? + private: false + unsafe: false + + - name: password + prompt: What is your password? 
+ default: "secret" diff --git a/test/schemas/test/playbooks/with_.yml b/test/schemas/test/playbooks/with_.yml new file mode 100644 index 0000000..b3a3748 --- /dev/null +++ b/test/schemas/test/playbooks/with_.yml @@ -0,0 +1,34 @@ +--- +# https://docs.ansible.com/ansible/latest/user_guide/playbooks_loops.html#with-flattened +- hosts: localhost + tasks: + - ansible.builtin.debug: + msg: "{{ item }}" + with_list: [] # <-- valid + - ansible.builtin.debug: + msg: "{{ item }}" + with_items: [] # <-- valid + - ansible.builtin.debug: + msg: "{{ item }}" + with_indexed_items: [] + - ansible.builtin.debug: + msg: "{{ item }}" + with_together: [] + - ansible.builtin.debug: + msg: "{{ item }}" + with_dict: {} + - ansible.builtin.debug: + msg: "{{ item }}" + with_sequence: [] + - ansible.builtin.debug: + msg: "{{ item }}" + with_subelements: [] + - ansible.builtin.debug: + msg: "{{ item }}" + with_nested: [] + - ansible.builtin.debug: + msg: "{{ item }}" + with_random_choice: [] + - ansible.builtin.debug: + msg: "{{ item }}" + with_fileglob: [] diff --git a/test/schemas/test/reqs2/meta/requirements.yml b/test/schemas/test/reqs2/meta/requirements.yml new file mode 100644 index 0000000..8d55085 --- /dev/null +++ b/test/schemas/test/reqs2/meta/requirements.yml @@ -0,0 +1,7 @@ +# https://docs.ansible.com/ansible/latest/galaxy/user_guide.html +collections: + - doo.bar + - name: geerlingguy.php_roles + version: 0.9.3 + source: https://galaxy.ansible.com +roles: [] diff --git a/test/schemas/test/reqs4/meta/requirements.yml b/test/schemas/test/reqs4/meta/requirements.yml new file mode 100644 index 0000000..8269128 --- /dev/null +++ b/test/schemas/test/reqs4/meta/requirements.yml @@ -0,0 +1,6 @@ +# requirements v1 format +- src: https://github.com/bennojoy/nginx +- src: git+http://bitbucket.org/willthames/git-ansible-galaxy + version: v1.4 + scm: git +- include: foo.yml diff --git a/test/schemas/test/reqs5/meta/requirements.yml b/test/schemas/test/reqs5/meta/requirements.yml new file mode 100644 index 0000000..cd99e3c --- /dev/null +++ b/test/schemas/test/reqs5/meta/requirements.yml @@ -0,0 +1,3 @@ +# Collection without roles +collections: + - name: kubernetes.core diff --git a/test/schemas/test/roles/empty-meta/meta/main.yml b/test/schemas/test/roles/empty-meta/meta/main.yml new file mode 100644 index 0000000..9b6fe15 --- /dev/null +++ b/test/schemas/test/roles/empty-meta/meta/main.yml @@ -0,0 +1 @@ +# this is meta file without any data, ansible-core accepts it diff --git a/test/schemas/test/roles/foo/meta/argument_specs.yml b/test/schemas/test/roles/foo/meta/argument_specs.yml new file mode 100644 index 0000000..c8d8c68 --- /dev/null +++ b/test/schemas/test/roles/foo/meta/argument_specs.yml @@ -0,0 +1,74 @@ +--- +# https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#role-argument-validation +argument_specs: + main: + short_description: The main entry point for the role. + description: "a longer description" + version_added: 1.2.3 + author: Foobar Baz + options: + my_app_int: + type: "int" + required: false + default: 42 + description: "The integer value, defaulting to 42." + no_log: false + version_added: 1.0.0 + + my_app_str: + type: "str" + required: true + description: + - The string value. + - Has some more text. + choices: + - foo + - bar + - baz + + top_level: + type: dict + description: Contains more content. + options: + sub_option: + type: list + elements: int + description: A list of special integers. 
+ choices: + - 1 + - 2 + - 3 + - 123 + + seealso: + - module: community.foo.bar + - module: community.foo.baz + description: Baz bam! + - plugin: community.foo.bam + plugin_type: lookup + - plugin: community.foo.bar + plugin_type: lookup + description: A lookup plugin. + - ref: developer_guide + description: A link into the Ansible documentation. + - link: https://docs.ansible.com/ + name: The Ansible documentation. + description: A link to the Ansible documentation. + + alternate: + short_description: The alternate entry point for the my_app role. + author: + - Foobar Baz + - Bert Foo + options: + my_app_int: + type: "int" + required: false + default: 1024 + description: "The integer value, defaulting to 1024." + + third: + description: + - First paragraph. + - Second paragraph. + options: {} diff --git a/test/schemas/test/roles/foo/meta/main.yml b/test/schemas/test/roles/foo/meta/main.yml new file mode 100644 index 0000000..b84b10c --- /dev/null +++ b/test/schemas/test/roles/foo/meta/main.yml @@ -0,0 +1,46 @@ +collections: + - foo.bar +dependencies: + - name: ansible-role-foo + version: "1.0" + - name: ansible-role-bar + version: "1.0" + # from Bitbucket + - src: git+http://bitbucket.org/willthames/git-ansible-galaxy + version: v1.4 + + # from Bitbucket, alternative syntax and caveats + - src: http://bitbucket.org/willthames/hg-ansible-galaxy + scm: hg + + # from galaxy + - src: community.molecule + + # from GitHub + - src: https://github.com/bennojoy/nginx + + # from GitHub, overriding the name and specifying a specific tag + - src: https://github.com/bennojoy/nginx + version: master + name: nginx_role + + # from GitLab or other git-based scm + - src: git@gitlab.company.com:my-group/my-repo.git + scm: git + version: "0.1" # quoted, so YAML doesn't parse this as a floating-point value + + # from a web server, where the role is packaged in a tar.gz + - src: https://some.webserver.example.com/files/master.tar.gz + name: http-role + +galaxy_info: + author: John Doe + company: foo + description: foo + license: MIT + min_ansible_version: "2.9" + # standalone: true + platforms: + - name: Alpine + versions: + - all diff --git a/test/schemas/test/roles/foo/meta/runtime.yml b/test/schemas/test/roles/foo/meta/runtime.yml new file mode 100644 index 0000000..561e446 --- /dev/null +++ b/test/schemas/test/roles/foo/meta/runtime.yml @@ -0,0 +1,39 @@ +# Based on https://docs.ansible.com/ansible/devel/dev_guide/developing_collections_structure.html#meta-directory +requires_ansible: ">=2.10,<2.11" +plugin_routing: + inventory: + kubevirt: + redirect: community.general.kubevirt + my_inventory: + tombstone: + removal_version: "2.0.0" + warning_text: my_inventory has been removed. Please use other_inventory instead. + modules: + my_module: + deprecation: + removal_date: "2021-11-30" + warning_text: + my_module will be removed in a future release of this collection. Use + another.collection.new_module instead. + redirect: another.collection.new_module + podman_image: + redirect: containers.podman.podman_image + module_utils: + ec2: + redirect: amazon.aws.ec2 + util_dir.subdir.my_util: + redirect: namespace.name.my_util +import_redirection: + ansible.module_utils.old_utility: + redirect: ansible_collections.namespace_name.collection_name.plugins.module_utils.new_location +action_groups: + groupname: + # The special metadata dictionary. All action/module names should be strings. 
+ - metadata: + extend_group: + - another.collection.groupname + - another_group + - my_action + another_group: + - my_module + - another.collection.another_module diff --git a/test/schemas/test/roles/maximum/meta/main.yml b/test/schemas/test/roles/maximum/meta/main.yml new file mode 100644 index 0000000..10c57b1 --- /dev/null +++ b/test/schemas/test/roles/maximum/meta/main.yml @@ -0,0 +1,20 @@ +allow_duplicates: true +galaxy_info: + author: John Doe + standalone: true # v1 role meta (standalone) + description: maximum + min_ansible_version: "2.9" + company: foo + license: MIT + galaxy_tags: # ensure galaxy_tags is allowed + - database + platforms: + - name: Alpine + versions: + - all +dependencies: + - role: foo + vars: {} + when: + - foo + - bar diff --git a/test/schemas/test/roles/meta-tags/meta/main.yml b/test/schemas/test/roles/meta-tags/meta/main.yml new file mode 100644 index 0000000..4abba23 --- /dev/null +++ b/test/schemas/test/roles/meta-tags/meta/main.yml @@ -0,0 +1,25 @@ +--- +# https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_reuse_roles.html#role-dependencies +dependencies: + - role: foo + tags: fruit # simple string allowed + - role: bar + tags: # array of strings allowed + - apple + - orange + - role: requires_sudo + become: true + - role: role_with_condition + when: inventory_hostname == "foo" + - role: another_role + # https://docs.ansible.com/ansible/latest/user_guide/playbooks_reuse_roles.html#passing-different-parameters + something_that_counts_as_role_parameter: ... + vars: + "foo": bar +galaxy_info: + author: John Doe + standalone: true + description: foo + license: MIT + min_ansible_version: "2.10" + platforms: [] diff --git a/test/schemas/test/roles/ns/meta/main.yml b/test/schemas/test/roles/ns/meta/main.yml new file mode 100644 index 0000000..0ea558c --- /dev/null +++ b/test/schemas/test/roles/ns/meta/main.yml @@ -0,0 +1,13 @@ +--- +galaxy_info: + author: John Doe + standalone: true + description: foo + min_ansible_version: "2.9" + namespace: foo_bar + company: foo + license: MIT + platforms: + - name: Alpine + versions: + - all diff --git a/test/schemas/test/roles/v1_role/meta/main.yml b/test/schemas/test/roles/v1_role/meta/main.yml new file mode 100644 index 0000000..a74eb47 --- /dev/null +++ b/test/schemas/test/roles/v1_role/meta/main.yml @@ -0,0 +1,12 @@ +--- +galaxy_info: + standalone: true + author: foo-bar # <-- that is a valid author name because is a valid github username + description: foo + min_ansible_version: "2.9" + company: foo + license: MIT + platforms: + - name: Alpine + versions: + - all diff --git a/test/schemas/test/tests/integration/rom_role/meta/main.yml b/test/schemas/test/tests/integration/rom_role/meta/main.yml new file mode 100644 index 0000000..c1409c4 --- /dev/null +++ b/test/schemas/test/tests/integration/rom_role/meta/main.yml @@ -0,0 +1,5 @@ +--- +dependencies: [] +galaxy_info: + standalone: false + description: foo diff --git a/test/schemas/tsconfig.json b/test/schemas/tsconfig.json new file mode 100644 index 0000000..fe51c68 --- /dev/null +++ b/test/schemas/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "declaration": true, + "esModuleInterop": true, + "lib": ["es5", "es2015.promise"], + "module": "commonjs", + "moduleResolution": "node", + "outDir": "../lib/umd", + "resolveJsonModule": true, + "sourceMap": true, + "strict": true, + "stripInternal": true, + "target": "es5" + }, + "exclude": ["node_modules"], + "include": ["src/**/*"] +} diff --git a/test/test_ansiblelintrule.py 
b/test/test_ansiblelintrule.py new file mode 100644 index 0000000..4afcd59 --- /dev/null +++ b/test/test_ansiblelintrule.py @@ -0,0 +1,29 @@ +"""Generic tests for AnsibleLintRule class.""" +from __future__ import annotations + +from typing import Any + +import pytest +from _pytest.monkeypatch import MonkeyPatch + +from ansiblelint.config import options +from ansiblelint.rules import AnsibleLintRule + + +def test_unjinja() -> None: + """Verify that unjinja understands nested mustache.""" + text = "{{ a }} {% b %} {# try to confuse parsing inside a comment { {{}} } #}" + output = "JINJA_EXPRESSION JINJA_STATEMENT JINJA_COMMENT" + assert AnsibleLintRule.unjinja(text) == output + + +@pytest.mark.parametrize("rule_config", ({}, {"foo": True, "bar": 1})) +def test_rule_config(rule_config: dict[str, Any], monkeypatch: MonkeyPatch) -> None: + """Check that a rule config is inherited from options.""" + rule_id = "rule-0" + monkeypatch.setattr(AnsibleLintRule, "id", rule_id) + monkeypatch.setitem(options.rules, rule_id, rule_config) + + rule = AnsibleLintRule() + assert set(rule.rule_config.items()) == set(rule_config.items()) + assert all(rule.get_config(k) == v for k, v in rule_config.items()) diff --git a/test/test_ansiblesyntax.py b/test/test_ansiblesyntax.py new file mode 100644 index 0000000..f71a525 --- /dev/null +++ b/test/test_ansiblesyntax.py @@ -0,0 +1,19 @@ +"""Test Ansible Syntax. + +This module contains tests that validate that linter does not produce errors +when encountering what counts as valid Ansible syntax. +""" +from ansiblelint.testing import RunFromText + +PB_WITH_NULL_TASKS = """\ +--- +- name: Fixture for test_null_tasks + hosts: all + tasks: +""" + + +def test_null_tasks(default_text_runner: RunFromText) -> None: + """Assure we do not fail when encountering null tasks.""" + results = default_text_runner.run_playbook(PB_WITH_NULL_TASKS) + assert not results diff --git a/test/test_app.py b/test/test_app.py new file mode 100644 index 0000000..cbbc6d5 --- /dev/null +++ b/test/test_app.py @@ -0,0 +1,21 @@ +"""Test for app module.""" +from pathlib import Path + +from ansiblelint.file_utils import Lintable +from ansiblelint.testing import run_ansible_lint + + +def test_generate_ignore(tmp_path: Path) -> None: + """Validate that --generate-ignore dumps expected ignore to the file.""" + lintable = Lintable(tmp_path / "vars.yaml") + lintable.content = "foo: bar\nfoo: baz\n" + lintable.write(force=True) + assert not (tmp_path / ".ansible-lint-ignore").exists() + result = run_ansible_lint(lintable.filename, "--generate-ignore", cwd=str(tmp_path)) + assert result.returncode == 2 + assert (tmp_path / ".ansible-lint-ignore").exists() + with open(tmp_path / ".ansible-lint-ignore", encoding="utf-8") as f: + assert "vars.yaml yaml[key-duplicates]\n" in f.readlines() + # Run again and now we expect to succeed as we have an ignore file. 
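+ # The generated file pairs each path with a rule tag, so the re-run reports no new violations.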
+ result = run_ansible_lint(lintable.filename, cwd=str(tmp_path)) + assert result.returncode == 0 diff --git a/test/test_cli_role_paths.py b/test/test_cli_role_paths.py new file mode 100644 index 0000000..b70191d --- /dev/null +++ b/test/test_cli_role_paths.py @@ -0,0 +1,186 @@ +"""Tests related to role paths.""" +from __future__ import annotations + +import os +from pathlib import Path + +import pytest + +from ansiblelint.testing import run_ansible_lint +from ansiblelint.text import strip_ansi_escape + + +@pytest.fixture(name="local_test_dir") +def fixture_local_test_dir() -> str: + """Fixture to return local test directory.""" + return os.path.realpath( + os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "examples") + ) + + +def test_run_single_role_path_no_trailing_slash_module(local_test_dir: str) -> None: + """Test that a role path without a trailing slash is accepted.""" + cwd = local_test_dir + role_path = "roles/test-role" + + result = run_ansible_lint(role_path, cwd=cwd) + assert "Use shell only when shell functionality is required" in result.stdout + + +def test_run_single_role_path_no_trailing_slash_script(local_test_dir: str) -> None: + """Test that a role path without a trailing slash is accepted.""" + cwd = local_test_dir + role_path = "roles/test-role" + + result = run_ansible_lint(role_path, cwd=cwd, executable="ansible-lint") + assert "Use shell only when shell functionality is required" in result.stdout + + +def test_run_single_role_path_with_trailing_slash(local_test_dir: str) -> None: + """Test that a role path with a trailing slash is accepted.""" + cwd = local_test_dir + role_path = "roles/test-role/" + + result = run_ansible_lint(role_path, cwd=cwd) + assert "Use shell only when shell functionality is required" in result.stdout + + +def test_run_multiple_role_path_no_trailing_slash(local_test_dir: str) -> None: + """Test that multiple roles paths without a trailing slash are accepted.""" + cwd = local_test_dir + role_path = "roles/test-role" + + result = run_ansible_lint(role_path, cwd=cwd) + assert "Use shell only when shell functionality is required" in result.stdout + + +def test_run_multiple_role_path_with_trailing_slash(local_test_dir: str) -> None: + """Test that multiple roles paths without a trailing slash are accepted.""" + cwd = local_test_dir + role_path = "roles/test-role/" + + result = run_ansible_lint(role_path, cwd=cwd) + assert "Use shell only when shell functionality is required" in result.stdout + + +def test_run_inside_role_dir(local_test_dir: str) -> None: + """Tests execution from inside a role.""" + cwd = os.path.join(local_test_dir, "roles/test-role/") + role_path = "." 
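+ # "." should be resolved relative to the role directory used as cwd.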
+ + result = run_ansible_lint(role_path, cwd=cwd) + assert "Use shell only when shell functionality is required" in result.stdout + + +def test_run_role_three_dir_deep(local_test_dir: str) -> None: + """Tests execution from deep inside a role.""" + cwd = local_test_dir + role_path = "testproject/roles/test-role" + + result = run_ansible_lint(role_path, cwd=cwd) + assert "Use shell only when shell functionality is required" in result.stdout + + +def test_run_playbook(local_test_dir: str) -> None: + """Call ansible-lint the way molecule does.""" + cwd = os.path.abspath(os.path.join(local_test_dir, "roles/test-role")) + lintable = "molecule/default/include-import-role.yml" + role_path = str(Path(cwd).parent.resolve()) + + env = os.environ.copy() + env["ANSIBLE_ROLES_PATH"] = role_path + + result = run_ansible_lint(lintable, cwd=cwd, env=env) + assert "Use shell only when shell functionality is required" in result.stdout + + +@pytest.mark.parametrize( + ("args", "expected_msg"), + ( + pytest.param( + [], "role-name: Role name invalid-name does not match", id="normal" + ), + pytest.param(["--skip-list", "role-name"], "", id="skipped"), + ), +) +def test_run_role_name_invalid( + local_test_dir: str, args: list[str], expected_msg: str +) -> None: + """Test run with a role with invalid name.""" + cwd = local_test_dir + role_path = "roles/invalid-name" + + result = run_ansible_lint(*args, role_path, cwd=cwd) + assert result.returncode == (2 if expected_msg else 0), result + if expected_msg: + assert expected_msg in strip_ansi_escape(result.stdout) + + +def test_run_role_name_with_prefix(local_test_dir: str) -> None: + """Test run where role path has a prefix.""" + cwd = local_test_dir + role_path = "roles/ansible-role-foo" + + result = run_ansible_lint("-v", role_path, cwd=cwd) + assert len(result.stdout) == 0 + assert result.returncode == 0 + + +def test_run_role_name_from_meta(local_test_dir: str) -> None: + """Test running from inside meta folder.""" + cwd = local_test_dir + role_path = "roles/valid-due-to-meta" + + result = run_ansible_lint("-v", role_path, cwd=cwd) + assert len(result.stdout) == 0 + assert result.returncode == 0 + + +def test_run_invalid_role_name_from_meta(local_test_dir: str) -> None: + """Test invalid role from inside meta folder.""" + cwd = local_test_dir + role_path = "roles/invalid_due_to_meta" + + result = run_ansible_lint(role_path, cwd=cwd) + assert ( + "role-name: Role name invalid-due-to-meta does not match" + in strip_ansi_escape(result.stdout) + ) + + +def test_run_single_role_path_with_roles_path_env(local_test_dir: str) -> None: + """Test for role name collision with ANSIBLE_ROLES_PATH. + + Test if ansible-lint chooses the role in the current directory when the role + specified as parameter exists in the current directory and the ANSIBLE_ROLES_PATH. 
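+ The role found relative to the current directory is expected to be linted, producing the usual test-role shell warning.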
+ """ + cwd = local_test_dir + role_path = "roles/test-role" + + env = os.environ.copy() + env["ANSIBLE_ROLES_PATH"] = os.path.realpath(os.path.join(cwd, "../examples/roles")) + + result = run_ansible_lint(role_path, cwd=cwd, env=env) + assert "Use shell only when shell functionality is required" in result.stdout + + +@pytest.mark.parametrize( + ("result", "env"), + ((True, {"GITHUB_ACTIONS": "true", "GITHUB_WORKFLOW": "foo"}), (False, None)), + ids=("on", "off"), +) +def test_run_playbook_github(result: bool, env: dict[str, str]) -> None: + """Call ansible-lint simulating GitHub Actions environment.""" + cwd = str(Path(__file__).parent.parent.resolve()) + role_path = "examples/playbooks/example.yml" + + if env is None: + env = {} + env["PATH"] = os.environ["PATH"] + result_gh = run_ansible_lint(role_path, cwd=cwd, env=env) + + expected = ( + "::error file=examples/playbooks/example.yml,line=44,severity=VERY_LOW,title=package-latest::" + "Package installs should not use latest" + ) + assert (expected in result_gh.stdout) is result diff --git a/test/test_commandline_invocations_same_as_config.py b/test/test_commandline_invocations_same_as_config.py new file mode 100644 index 0000000..eecc0d9 --- /dev/null +++ b/test/test_commandline_invocations_same_as_config.py @@ -0,0 +1,195 @@ +"""Test cli arguments and config.""" +from __future__ import annotations + +import os +from pathlib import Path + +import pytest +from _pytest.monkeypatch import MonkeyPatch + +from ansiblelint import cli + + +@pytest.fixture(name="base_arguments") +def fixture_base_arguments() -> list[str]: + """Define reusable base arguments for tests in current module.""" + return ["../test/skiptasks.yml"] + + +@pytest.mark.parametrize( + ("args", "config"), + ( + (["-p"], "test/fixtures/parseable.yml"), + (["-q"], "test/fixtures/quiet.yml"), + (["-r", "test/fixtures/rules/"], "test/fixtures/rulesdir.yml"), + (["-R", "-r", "test/fixtures/rules/"], "test/fixtures/rulesdir-defaults.yml"), + (["-s"], "test/fixtures/strict.yml"), + (["-t", "skip_ansible_lint"], "test/fixtures/tags.yml"), + (["-v"], "test/fixtures/verbosity.yml"), + (["-x", "bad_tag"], "test/fixtures/skip-tags.yml"), + (["--exclude", "test/"], "test/fixtures/exclude-paths.yml"), + (["--show-relpath"], "test/fixtures/show-abspath.yml"), + ([], "test/fixtures/show-relpath.yml"), + ), +) +def test_ensure_config_are_equal( + base_arguments: list[str], args: list[str], config: str +) -> None: + """Check equality of the CLI options to config files.""" + command = base_arguments + args + cli_parser = cli.get_cli_parser() + + options = cli_parser.parse_args(command) + file_config = cli.load_config(config) + + for key, val in file_config.items(): + # config_file does not make sense in file_config + if key == "config_file": + continue + + if key in {"exclude_paths", "rulesdir"}: + val = [Path(p) for p in val] + assert val == getattr(options, key) + + +@pytest.mark.parametrize( + ("with_base", "args", "config"), + ( + (True, ["--write"], "test/fixtures/config-with-write-all.yml"), + (True, ["--write=all"], "test/fixtures/config-with-write-all.yml"), + (True, ["--write", "all"], "test/fixtures/config-with-write-all.yml"), + (True, ["--write=none"], "test/fixtures/config-with-write-none.yml"), + (True, ["--write", "none"], "test/fixtures/config-with-write-none.yml"), + ( + True, + ["--write=rule-tag,rule-id"], + "test/fixtures/config-with-write-subset.yml", + ), + ( + True, + ["--write", "rule-tag,rule-id"], + "test/fixtures/config-with-write-subset.yml", + ), + ( + True, + 
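+ # Repeating --write should merge into the same subset as the comma-separated form.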
["--write", "rule-tag", "--write", "rule-id"], + "test/fixtures/config-with-write-subset.yml", + ), + ( + False, + ["--write", "examples/playbooks/example.yml"], + "test/fixtures/config-with-write-all.yml", + ), + ( + False, + ["--write", "examples/playbooks/example.yml", "non-existent.yml"], + "test/fixtures/config-with-write-all.yml", + ), + ), +) +def test_ensure_write_cli_does_not_consume_lintables( + base_arguments: list[str], with_base: bool, args: list[str], config: str +) -> None: + """Check equality of the CLI --write options to config files.""" + cli_parser = cli.get_cli_parser() + + command = base_arguments + args if with_base else args + options = cli_parser.parse_args(command) + file_config = cli.load_config(config) + + file_value = file_config.get("write_list") + orig_cli_value = getattr(options, "write_list") + cli_value = cli.WriteArgAction.merge_write_list_config( + from_file=[], from_cli=orig_cli_value + ) + assert file_value == cli_value + + +def test_config_can_be_overridden(base_arguments: list[str]) -> None: + """Check that config can be overridden from CLI.""" + no_override = cli.get_config(base_arguments + ["-t", "bad_tag"]) + + overridden = cli.get_config( + base_arguments + ["-t", "bad_tag", "-c", "test/fixtures/tags.yml"] + ) + + assert no_override.tags + ["skip_ansible_lint"] == overridden.tags + + +def test_different_config_file(base_arguments: list[str]) -> None: + """Ensures an alternate config_file can be used.""" + diff_config = cli.get_config( + base_arguments + ["-c", "test/fixtures/ansible-config.yml"] + ) + no_config = cli.get_config(base_arguments + ["-v"]) + + assert diff_config.verbosity == no_config.verbosity + + +def test_expand_path_user_and_vars_config_file(base_arguments: list[str]) -> None: + """Ensure user and vars are expanded when specified as exclude_paths.""" + config1 = cli.get_config( + base_arguments + ["-c", "test/fixtures/exclude-paths-with-expands.yml"] + ) + config2 = cli.get_config( + base_arguments + + ["--exclude", "~/.ansible/roles", "--exclude", "$HOME/.ansible/roles"] + ) + + assert str(config1.exclude_paths[0]) == os.path.expanduser("~/.ansible/roles") + assert str(config2.exclude_paths[0]) == os.path.expanduser("~/.ansible/roles") + assert str(config1.exclude_paths[1]) == os.path.expandvars("$HOME/.ansible/roles") + assert str(config2.exclude_paths[1]) == os.path.expandvars("$HOME/.ansible/roles") + + +def test_path_from_config_do_not_depend_on_cwd( + monkeypatch: MonkeyPatch, +) -> None: # Issue 572 + """Check that config-provided paths are decoupled from CWD.""" + config1 = cli.load_config("test/fixtures/config-with-relative-path.yml") + monkeypatch.chdir("test") + config2 = cli.load_config("fixtures/config-with-relative-path.yml") + + assert config1["exclude_paths"].sort() == config2["exclude_paths"].sort() + + +def test_path_from_cli_depend_on_cwd( + base_arguments: list[str], monkeypatch: MonkeyPatch +) -> None: + """Check that CLI-provided paths are relative to CWD.""" + # Issue 572 + arguments = base_arguments + [ + "--exclude", + "test/fixtures/config-with-relative-path.yml", + ] + + options1 = cli.get_cli_parser().parse_args(arguments) + assert "test/test" not in str(options1.exclude_paths[0]) + + test_dir = "test" + monkeypatch.chdir(test_dir) + options2 = cli.get_cli_parser().parse_args(arguments) + + assert "test/test" in str(options2.exclude_paths[0]) + + +@pytest.mark.parametrize( + "config_file", + ( + pytest.param("test/fixtures/ansible-config-invalid.yml", id="invalid"), + 
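+ # A path under /dev/null can never exist, which simulates a missing config file.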
pytest.param("/dev/null/ansible-config-missing.yml", id="missing"), + ), +) +def test_config_failure(base_arguments: list[str], config_file: str) -> None: + """Ensures specific config files produce error code 3.""" + with pytest.raises(SystemExit, match="^3$"): + cli.get_config(base_arguments + ["-c", config_file]) + + +def test_extra_vars_loaded(base_arguments: list[str]) -> None: + """Ensure ``extra_vars`` option is loaded from file config.""" + config = cli.get_config( + base_arguments + ["-c", "test/fixtures/config-with-extra-vars.yml"] + ) + + assert config.extra_vars == {"foo": "bar", "knights_favorite_word": "NI"} diff --git a/test/test_constants.py b/test/test_constants.py new file mode 100644 index 0000000..52b297a --- /dev/null +++ b/test/test_constants.py @@ -0,0 +1,9 @@ +"""Tests for constants module.""" +from ansiblelint.constants import States + + +def test_states() -> None: + """Test that states are evaluated as boolean false.""" + assert bool(States.NOT_LOADED) is False + assert bool(States.LOAD_FAILED) is False + assert bool(States.UNKNOWN_DATA) is False diff --git a/test/test_dependencies_in_meta.py b/test/test_dependencies_in_meta.py new file mode 100644 index 0000000..44007b7 --- /dev/null +++ b/test/test_dependencies_in_meta.py @@ -0,0 +1,10 @@ +"""Tests about dependencies in meta.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + + +def test_external_dependency_is_ok(default_rules_collection: RulesCollection) -> None: + """Check that external dep in role meta is not a violation.""" + playbook_path = "examples/roles/dependency_in_meta/meta/main.yml" + good_runner = Runner(playbook_path, rules=default_rules_collection) + assert [] == good_runner.run() diff --git a/test/test_eco.py b/test/test_eco.py new file mode 100644 index 0000000..6463b71 --- /dev/null +++ b/test/test_eco.py @@ -0,0 +1,88 @@ +"""Test a set of 3rd party Ansible repositories for possible regressions.""" +import os +import pathlib +import re +import shlex +import subprocess + +import pytest + +from ansiblelint.testing import run_ansible_lint + +eco_repos = { + "bootstrap": "https://github.com/robertdebock/ansible-role-bootstrap", + "cisco.nxos": "https://github.com/ansible-collections/cisco.nxos", + "colsystem": "https://github.com/devroles/ansible_collection_system", + "debops": "https://github.com/debops/debops", + "docker-rootless": "https://github.com/konstruktoid/ansible-docker-rootless", + "hardening": "https://github.com/konstruktoid/ansible-role-hardening", + "mysql": "https://github.com/geerlingguy/ansible-role-mysql.git", +} + + +def sanitize_output(text: str) -> str: + """Make the output less likely to vary between runs or minor changes.""" + # replace full path to home directory with ~. 
+ result = text.replace(os.path.expanduser("~"), "~") + # removes warning related to PATH alteration + result = re.sub( + r"^WARNING: PATH altered to include.+\n", "", result, flags=re.MULTILINE + ) + # replace venv path + result = result.replace(".tox/venv", ".tox/eco") + + return result + + +@pytest.mark.eco() +@pytest.mark.parametrize(("repo"), (eco_repos.keys())) +def test_eco(repo: str) -> None: + """Test a set of 3rd party Ansible repositories for possible regressions.""" + url = eco_repos[repo] + cache_dir = os.path.expanduser("~/.cache/ansible-lint-eco") + my_dir = (pathlib.Path(__file__).parent / "eco").resolve() + os.makedirs(cache_dir, exist_ok=True) + # clone repo + if os.path.exists(f"{cache_dir}/{repo}/.git"): + subprocess.run("git pull", cwd=f"{cache_dir}/{repo}", shell=True, check=True) + else: + subprocess.run( + f"git clone --recurse-submodules {url} {cache_dir}/{repo}", + shell=True, + check=True, + ) + # run ansible lint and paths from user home in order to produce + # consistent results regardless on its location. + for step in ["before", "after"]: + args = ["-f", "pep8"] + executable = ( + "ansible-lint" + if step == "after" + else f"{pathlib.Path(__file__).parent.parent}/.tox/venv/bin/ansible-lint" + ) + result = run_ansible_lint( + *args, + executable=executable, + cwd=f"{cache_dir}/{repo}", + ) + + # Ensure that cmd looks the same for later diff, even if the path was different + result.args[0] = "ansible-lint" + # sort stderr because parallel runs can + result.stderr = "\n".join(sorted(result.stderr.split("\n"))) + + result_txt = f"CMD: {shlex.join(result.args)}\n\nRC: {result.returncode}\n\nSTDERR:\n{result.stderr}\n\nSTDOUT:\n{result.stdout}" + + os.makedirs(f"{my_dir}/{step}", exist_ok=True) + with open(f"{my_dir}/{step}/{repo}.result", "w", encoding="utf-8") as f: + f.write(sanitize_output(result_txt)) + + # fail if result is different than our expected one + result = subprocess.run( + f"git --no-pager diff --exit-code --no-index test/eco/before/{repo}.result test/eco/after/{repo}.result", + shell=True, + check=False, + capture_output=True, + text=True, + ) + assert result.returncode == 0, result_txt + f"\nDIFF:\n{result.stdout}" diff --git a/test/test_examples.py b/test/test_examples.py new file mode 100644 index 0000000..19cb25b --- /dev/null +++ b/test/test_examples.py @@ -0,0 +1,64 @@ +"""Assure samples produced desire outcomes.""" +import pytest + +from ansiblelint.app import get_app +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner +from ansiblelint.testing import run_ansible_lint + + +def test_example(default_rules_collection: RulesCollection) -> None: + """example.yml is expected to have exact number of errors inside.""" + result = Runner( + "examples/playbooks/example.yml", rules=default_rules_collection + ).run() + assert len(result) == 22 + + +@pytest.mark.parametrize( + ("filename", "line", "column"), + ( + pytest.param( + "examples/playbooks/syntax-error-string.yml", 6, 7, id="syntax-error" + ), + pytest.param("examples/playbooks/syntax-error.yml", 2, 3, id="syntax-error"), + ), +) +def test_example_syntax_error( + default_rules_collection: RulesCollection, filename: str, line: int, column: int +) -> None: + """Validates that loading valid YAML string produce error.""" + result = Runner(filename, rules=default_rules_collection).run() + assert len(result) == 1 + assert result[0].rule.id == "syntax-check" + # This also ensures that line and column numbers start at 1, so they + # match what editors will show (or 
output from other linters) + assert result[0].linenumber == line + assert result[0].column == column + + +def test_example_custom_module(default_rules_collection: RulesCollection) -> None: + """custom_module.yml is expected to pass.""" + app = get_app() + result = Runner( + "examples/playbooks/custom_module.yml", rules=default_rules_collection + ).run() + assert len(result) == 0, f"{app.runtime.cache_dir}" + + +def test_full_vault(default_rules_collection: RulesCollection) -> None: + """custom_module.yml is expected to pass.""" + result = Runner( + "examples/playbooks/vars/not_decryptable.yml", rules=default_rules_collection + ).run() + assert len(result) == 0 + + +def test_custom_kinds() -> None: + """Check if user defined kinds are used.""" + result = run_ansible_lint("-vv", "--offline", "examples/other/") + assert result.returncode == 0 + # .yaml-too is not a recognized extension and unless is manually defined + # in our ansible-lint config, the test would not identify it as yaml file. + assert "Examining examples/other/some.yaml-too of type yaml" in result.stderr + assert "Examining examples/other/some.j2.yaml of type jinja2" in result.stderr diff --git a/test/test_file_path_evaluation.py b/test/test_file_path_evaluation.py new file mode 100644 index 0000000..15692e4 --- /dev/null +++ b/test/test_file_path_evaluation.py @@ -0,0 +1,126 @@ +"""Testing file path evaluation when using import_tasks / include_tasks.""" +from __future__ import annotations + +import textwrap +from pathlib import Path + +import pytest + +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + +LAYOUT_IMPORTS: dict[str, str] = { + "main.yml": textwrap.dedent( + """\ + --- + - name: Fixture + hosts: target + gather_facts: false + tasks: + - name: From main import task 1 + ansible.builtin.import_tasks: tasks/task_1.yml + """ + ), + "tasks/task_1.yml": textwrap.dedent( + """\ + --- + - name: task_1 | From task 1 import task 2 + ansible.builtin.import_tasks: tasks/task_2.yml + """ + ), + "tasks/task_2.yml": textwrap.dedent( + """\ + --- + - name: task_2 | From task 2 import subtask 1 + ansible.builtin.import_tasks: tasks/subtasks/subtask_1.yml + """ + ), + "tasks/subtasks/subtask_1.yml": textwrap.dedent( + """\ + --- + - name: subtask_1 | From subtask 1 import subtask 2 + ansible.builtin.import_tasks: tasks/subtasks/subtask_2.yml + """ + ), + "tasks/subtasks/subtask_2.yml": textwrap.dedent( + """\ + --- + - name: subtask_2 | From subtask 2 do something + debug: # <-- expected to raise fqcn[action-core] + msg: | + Something... + """ + ), +} + +LAYOUT_INCLUDES: dict[str, str] = { + "main.yml": textwrap.dedent( + """\ + --- + - name: Fixture + hosts: target + gather_facts: false + tasks: + - name: From main import task 1 + ansible.builtin.include_tasks: tasks/task_1.yml + """ + ), + "tasks/task_1.yml": textwrap.dedent( + """\ + --- + - name: task_1 | From task 1 import task 2 + ansible.builtin.include_tasks: tasks/task_2.yml + """ + ), + "tasks/task_2.yml": textwrap.dedent( + """\ + --- + - name: task_2 | From task 2 import subtask 1 + ansible.builtin.include_tasks: tasks/subtasks/subtask_1.yml + """ + ), + "tasks/subtasks/subtask_1.yml": textwrap.dedent( + """\ + --- + - name: subtask_1 | From subtask 1 import subtask 2 + ansible.builtin.include_tasks: tasks/subtasks/subtask_2.yml + """ + ), + "tasks/subtasks/subtask_2.yml": textwrap.dedent( + """\ + --- + - name: subtask_2 | From subtask 2 do something + debug: # <-- expected to raise fqcn[action-core] + msg: | + Something... 
+ """ + ), +} + + +@pytest.mark.parametrize( + "ansible_project_layout", + ( + pytest.param(LAYOUT_IMPORTS, id="using only import_tasks"), + pytest.param(LAYOUT_INCLUDES, id="using only include_tasks"), + ), +) +def test_file_path_evaluation( + tmp_path: Path, + default_rules_collection: RulesCollection, + ansible_project_layout: dict[str, str], +) -> None: + """Test file path evaluation when using import_tasks / include_tasks in the project. + + The goal of this test is to verify our ability to find errors from within + nested includes. + """ + for file_path, file_content in ansible_project_layout.items(): + full_path = tmp_path / file_path + full_path.parent.mkdir(parents=True, exist_ok=True) + full_path.write_text(file_content) + + result = Runner(str(tmp_path), rules=default_rules_collection).run() + + assert len(result) == 1 + assert result[0].rule.id == "fqcn" diff --git a/test/test_file_utils.py b/test/test_file_utils.py new file mode 100644 index 0000000..b427fa8 --- /dev/null +++ b/test/test_file_utils.py @@ -0,0 +1,491 @@ +"""Tests for file utility functions.""" +from __future__ import annotations + +import os +import time +from argparse import Namespace +from pathlib import Path +from typing import Any + +import pytest +from _pytest.capture import CaptureFixture +from _pytest.logging import LogCaptureFixture +from _pytest.monkeypatch import MonkeyPatch + +from ansiblelint import cli, file_utils +from ansiblelint.__main__ import initialize_logger +from ansiblelint.constants import FileType +from ansiblelint.file_utils import ( + Lintable, + expand_path_vars, + expand_paths_vars, + guess_project_dir, + normpath, + normpath_path, +) +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + +from .conftest import cwd + + +@pytest.mark.parametrize( + ("path", "expected"), + ( + pytest.param(Path("a/b/../"), "a", id="pathlib.Path"), + pytest.param("a/b/../", "a", id="str"), + pytest.param("", ".", id="empty"), + pytest.param(".", ".", id="empty"), + ), +) +def test_normpath(path: str, expected: str) -> None: + """Ensure that relative parent dirs are normalized in paths.""" + assert normpath(path) == expected + + +def test_expand_path_vars(monkeypatch: MonkeyPatch) -> None: + """Ensure that tilde and env vars are expanded in paths.""" + test_path = "/test/path" + monkeypatch.setenv("TEST_PATH", test_path) + assert expand_path_vars("~") == os.path.expanduser("~") + assert expand_path_vars("$TEST_PATH") == test_path + + +@pytest.mark.parametrize( + ("test_path", "expected"), + ( + pytest.param(Path("$TEST_PATH"), "/test/path", id="pathlib.Path"), + pytest.param("$TEST_PATH", "/test/path", id="str"), + pytest.param(" $TEST_PATH ", "/test/path", id="stripped-str"), + pytest.param("~", os.path.expanduser("~"), id="home"), + ), +) +def test_expand_paths_vars( + test_path: str | Path, expected: str, monkeypatch: MonkeyPatch +) -> None: + """Ensure that tilde and env vars are expanded in paths lists.""" + monkeypatch.setenv("TEST_PATH", "/test/path") + assert expand_paths_vars([test_path]) == [expected] # type: ignore + + +@pytest.mark.parametrize( + ("reset_env_var", "message_prefix"), + ( + # simulate absence of git command + ("PATH", "Failed to locate command: "), + # simulate a missing git repo + ("GIT_DIR", "Looking up for files"), + ), + ids=("no-git-cli", "outside-git-repo"), +) +def test_discover_lintables_git_verbose( + reset_env_var: str, + message_prefix: str, + monkeypatch: MonkeyPatch, + caplog: LogCaptureFixture, +) -> None: + """Ensure that 
autodiscovery lookup failures are logged.""" + options = cli.get_config(["-v"]) + initialize_logger(options.verbosity) + monkeypatch.setenv(reset_env_var, "") + file_utils.discover_lintables(options) + + assert any(m[2].startswith("Looking up for files") for m in caplog.record_tuples) + assert any(m.startswith(message_prefix) for m in caplog.messages) + + +@pytest.mark.parametrize( + "is_in_git", + (True, False), + ids=("in Git", "outside Git"), +) +def test_discover_lintables_silent( + is_in_git: bool, monkeypatch: MonkeyPatch, capsys: CaptureFixture[str] +) -> None: + """Verify that no stderr output is displayed while discovering yaml files. + + (when the verbosity is off, regardless of the Git or Git-repo presence) + + Also checks expected number of files are detected. + """ + options = cli.get_config([]) + test_dir = Path(__file__).resolve().parent + lint_path = test_dir / ".." / "examples" / "roles" / "test-role" + if not is_in_git: + monkeypatch.setenv("GIT_DIR", "") + + yaml_count = len(list(lint_path.glob("**/*.yml"))) + len( + list(lint_path.glob("**/*.yaml")) + ) + + monkeypatch.chdir(str(lint_path)) + files = file_utils.discover_lintables(options) + stderr = capsys.readouterr().err + assert not stderr, "No stderr output is expected when the verbosity is off" + assert ( + len(files) == yaml_count + ), "Expected to find {yaml_count} yaml files in {lint_path}".format_map( + locals(), + ) + + +def test_discover_lintables_umlaut(monkeypatch: MonkeyPatch) -> None: + """Verify that filenames containing German umlauts are not garbled by the discover_lintables.""" + options = cli.get_config([]) + test_dir = Path(__file__).resolve().parent + lint_path = test_dir / ".." / "examples" / "playbooks" + + monkeypatch.chdir(str(lint_path)) + files = file_utils.discover_lintables(options) + assert '"with-umlaut-\\303\\244.yml"' not in files + assert "with-umlaut-ä.yml" in files + + +@pytest.mark.parametrize( + ("path", "kind"), + ( + pytest.param("tasks/run_test_playbook.yml", "tasks", id="0"), + pytest.param("foo/playbook.yml", "playbook", id="1"), + pytest.param("playbooks/foo.yml", "playbook", id="2"), + pytest.param("examples/roles/foo.yml", "yaml", id="3"), + # the only yml file that is not a playbook inside molecule/ folders + pytest.param( + "examples/.config/molecule/config.yml", "yaml", id="4" + ), # molecule shared config + pytest.param( + "test/schemas/test/molecule/cluster/base.yml", "yaml", id="5" + ), # molecule scenario base config + pytest.param( + "test/schemas/test/molecule/cluster/molecule.yml", "yaml", id="6" + ), # molecule scenario config + pytest.param( + "test/schemas/test/molecule/cluster/foobar.yml", "playbook", id="7" + ), # custom playbook name + pytest.param( + "test/schemas/test/molecule/cluster/converge.yml", "playbook", id="8" + ), # common playbook name + pytest.param( + "roles/foo/molecule/scenario3/requirements.yml", "requirements", id="9" + ), # requirements + pytest.param( + "roles/foo/molecule/scenario3/collections.yml", "requirements", id="10" + ), # requirements + pytest.param( + "roles/foo/meta/argument_specs.yml", "arg_specs", id="11" + ), # role argument specs + # tasks files: + pytest.param("tasks/directory with spaces/main.yml", "tasks", id="12"), # tasks + pytest.param("tasks/requirements.yml", "tasks", id="13"), # tasks + # requirements (we do not support includes yet) + pytest.param( + "requirements.yml", "requirements", id="14" + ), # collection requirements + pytest.param( + "roles/foo/meta/requirements.yml", "requirements", id="15" + ), # inside 
role requirements + # Undeterminable files: + pytest.param("test/fixtures/unknown-type.yml", "yaml", id="16"), + pytest.param( + "releasenotes/notes/run-playbooks-refactor.yaml", "reno", id="17" + ), # reno + pytest.param("examples/host_vars/localhost.yml", "vars", id="18"), + pytest.param("examples/group_vars/all.yml", "vars", id="19"), + pytest.param("examples/playbooks/vars/other.yml", "vars", id="20"), + pytest.param( + "examples/playbooks/vars/subfolder/settings.yml", "vars", id="21" + ), # deep vars + pytest.param( + "molecule/scenario/collections.yml", "requirements", id="22" + ), # deprecated 2.8 format + pytest.param( + "../roles/geerlingguy.mysql/tasks/configure.yml", "tasks", id="23" + ), # relative path involved + pytest.param("galaxy.yml", "galaxy", id="24"), + pytest.param("foo.j2.yml", "jinja2", id="25"), + pytest.param("foo.yml.j2", "jinja2", id="26"), + pytest.param("foo.j2.yaml", "jinja2", id="27"), + pytest.param("foo.yaml.j2", "jinja2", id="28"), + pytest.param( + "examples/playbooks/rulebook.yml", "playbook", id="29" + ), # playbooks folder should determine kind + pytest.param( + "examples/rulebooks/rulebook.yml", "rulebook", id="30" + ), # content should determine it as a rulebook + pytest.param( + "examples/yamllint/valid.yml", "yaml", id="31" + ), # empty yaml is valid yaml, not assuming anything else + pytest.param( + "examples/other/guess-1.yml", "playbook", id="32" + ), # content should determine is as a play + pytest.param( + "examples/playbooks/tasks/passing_task.yml", "tasks", id="33" + ), # content should determine is tasks + pytest.param("examples/collection/galaxy.yml", "galaxy", id="34"), + pytest.param("examples/meta/runtime.yml", "meta-runtime", id="35"), + pytest.param("examples/meta/changelogs/changelog.yaml", "changelog", id="36"), + pytest.param("examples/inventory/inventory.yml", "inventory", id="37"), + pytest.param("examples/inventory/production.yml", "inventory", id="38"), + pytest.param("examples/playbooks/vars/empty_vars.yml", "vars", id="39"), + pytest.param( + "examples/playbooks/vars/subfolder/settings.yaml", "vars", id="40" + ), + ), +) +def test_kinds(monkeypatch: MonkeyPatch, path: str, kind: FileType) -> None: + """Verify auto-detection logic based on DEFAULT_KINDS.""" + options = cli.get_config([]) + + # pylint: disable=unused-argument + def mockreturn(options: Namespace) -> dict[str, Any]: + return {normpath(path): kind} + + # assert Lintable is able to determine file type + lintable_detected = Lintable(path) + lintable_expected = Lintable(path, kind=kind) + assert lintable_detected == lintable_expected + + monkeypatch.setattr(file_utils, "discover_lintables", mockreturn) + result = file_utils.discover_lintables(options) + assert lintable_detected.kind == result[lintable_expected.name] + + +def test_guess_project_dir_tmp_path(tmp_path: Path) -> None: + """Verify guess_project_dir().""" + with cwd(str(tmp_path)): + result = guess_project_dir(None) + assert result == str(tmp_path) + + +def test_guess_project_dir_dotconfig() -> None: + """Verify guess_project_dir().""" + with cwd("examples"): + assert os.path.exists( + ".config/ansible-lint.yml" + ), "Test requires config file inside .config folder." 
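+ # The directory containing .config/ should be reported as the project root.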
+ result = guess_project_dir(".config/ansible-lint.yml") + assert result == str(os.getcwd()) + + +BASIC_PLAYBOOK = """ +- name: "playbook" + tasks: + - name: Hello + debug: + msg: 'world' +""" + + +@pytest.fixture(name="tmp_updated_lintable") +def fixture_tmp_updated_lintable( + tmp_path: Path, path: str, content: str, updated_content: str +) -> Lintable: + """Create a temp file Lintable with a content update that is not on disk.""" + lintable = Lintable(tmp_path / path, content) + with lintable.path.open("w", encoding="utf-8") as f: + f.write(content) + # move mtime to a time in the past to avoid race conditions in the test + mtime = time.time() - 60 * 60 # 1hr ago + os.utime(str(lintable.path), (mtime, mtime)) + lintable.content = updated_content + return lintable + + +@pytest.mark.parametrize( + ("path", "content", "updated_content", "updated"), + ( + pytest.param( + "no_change.yaml", BASIC_PLAYBOOK, BASIC_PLAYBOOK, False, id="no_change" + ), + pytest.param( + "quotes.yaml", + BASIC_PLAYBOOK, + BASIC_PLAYBOOK.replace('"', "'"), + True, + id="updated_quotes", + ), + pytest.param( + "shorten.yaml", BASIC_PLAYBOOK, "# short file\n", True, id="shorten_file" + ), + ), +) +def test_lintable_updated( + path: str, content: str, updated_content: str, updated: bool +) -> None: + """Validate ``Lintable.updated`` when setting ``Lintable.content``.""" + lintable = Lintable(path, content) + + assert lintable.content == content + + lintable.content = updated_content + + assert lintable.content == updated_content + + assert lintable.updated is updated + + +@pytest.mark.parametrize( + "updated_content", ((None,), (b"bytes",)), ids=("none", "bytes") +) +def test_lintable_content_setter_with_bad_types(updated_content: Any) -> None: + """Validate ``Lintable.updated`` when setting ``Lintable.content``.""" + lintable = Lintable("bad_type.yaml", BASIC_PLAYBOOK) + assert lintable.content == BASIC_PLAYBOOK + + with pytest.raises(TypeError): + lintable.content = updated_content + + assert not lintable.updated + + +def test_lintable_with_new_file(tmp_path: Path) -> None: + """Validate ``Lintable.updated`` for a new file.""" + lintable = Lintable(tmp_path / "new.yaml") + + lintable.content = BASIC_PLAYBOOK + lintable.content = BASIC_PLAYBOOK + assert lintable.content == BASIC_PLAYBOOK + + assert lintable.updated + + assert not lintable.path.exists() + lintable.write() + assert lintable.path.exists() + assert lintable.path.read_text(encoding="utf-8") == BASIC_PLAYBOOK + + +@pytest.mark.parametrize( + ("path", "force", "content", "updated_content", "updated"), + ( + pytest.param( + "no_change.yaml", + False, + BASIC_PLAYBOOK, + BASIC_PLAYBOOK, + False, + id="no_change", + ), + pytest.param( + "forced.yaml", + True, + BASIC_PLAYBOOK, + BASIC_PLAYBOOK, + False, + id="forced_rewrite", + ), + pytest.param( + "quotes.yaml", + False, + BASIC_PLAYBOOK, + BASIC_PLAYBOOK.replace('"', "'"), + True, + id="updated_quotes", + ), + pytest.param( + "shorten.yaml", + False, + BASIC_PLAYBOOK, + "# short file\n", + True, + id="shorten_file", + ), + pytest.param( + "forced.yaml", + True, + BASIC_PLAYBOOK, + BASIC_PLAYBOOK.replace('"', "'"), + True, + id="forced_and_updated", + ), + ), +) +def test_lintable_write( + tmp_updated_lintable: Lintable, + force: bool, + content: str, + updated_content: str, + updated: bool, +) -> None: + """Validate ``Lintable.write`` writes when it should.""" + pre_updated = tmp_updated_lintable.updated + pre_stat = tmp_updated_lintable.path.stat() + + tmp_updated_lintable.write(force=force) + + post_stat = 
tmp_updated_lintable.path.stat() + post_updated = tmp_updated_lintable.updated + + # write() should not hide that an update happened + assert pre_updated == post_updated == updated + + if force or updated: + assert pre_stat.st_mtime < post_stat.st_mtime + else: + assert pre_stat.st_mtime == post_stat.st_mtime + + with tmp_updated_lintable.path.open("r", encoding="utf-8") as f: + post_content = f.read() + + if updated: + assert content != post_content + else: + assert content == post_content + assert post_content == updated_content + + +@pytest.mark.parametrize( + ("path", "content", "updated_content"), + ( + pytest.param( + "quotes.yaml", + BASIC_PLAYBOOK, + BASIC_PLAYBOOK.replace('"', "'"), + id="updated_quotes", + ), + ), +) +def test_lintable_content_deleter( + tmp_updated_lintable: Lintable, + content: str, + updated_content: str, +) -> None: + """Ensure that resetting content cache triggers re-reading file.""" + assert content != updated_content + assert tmp_updated_lintable.content == updated_content + del tmp_updated_lintable.content + assert tmp_updated_lintable.content == content + + +@pytest.mark.parametrize( + ("path", "result"), + ( + pytest.param("foo", "foo", id="rel"), + pytest.param(os.path.expanduser("~/xxx"), "~/xxx", id="rel-to-home"), + pytest.param("/a/b/c", "/a/b/c", id="absolute"), + pytest.param( + "examples/playbooks/roles", "examples/roles", id="resolve-symlink" + ), + ), +) +def test_normpath_path(path: str, result: str) -> None: + """Tests behavior of normpath.""" + assert normpath_path(path) == Path(result) + + +def test_bug_2513( + tmp_path: Path, + default_rules_collection: RulesCollection, +) -> None: + """Regression test for bug 2513. + + Test that when CWD is outside ~, and argument is like ~/playbook.yml + we will still be able to process the files. + See: https://github.com/ansible/ansible-lint/issues/2513 + """ + filename = "~/.cache/ansible-lint/playbook.yml" + os.makedirs(os.path.dirname(os.path.expanduser(filename)), exist_ok=True) + lintable = Lintable(filename, content="---\n- hosts: all\n") + lintable.write(force=True) + with cwd(str(tmp_path)): + results = Runner(filename, rules=default_rules_collection).run() + assert len(results) == 1 + assert results[0].rule.id == "name" diff --git a/test/test_formatter.py b/test/test_formatter.py new file mode 100644 index 0000000..29153f8 --- /dev/null +++ b/test/test_formatter.py @@ -0,0 +1,68 @@ +"""Test for output formatter.""" +# Copyright (c) 2016 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +import pathlib + +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.formatters import Formatter +from ansiblelint.rules import AnsibleLintRule + +rule = AnsibleLintRule() +rule.id = "TCF0001" +formatter = Formatter(pathlib.Path.cwd(), display_relative_path=True) +# These details would generate a rich rendering error if not escaped: +DETAILS = "Some [/tmp/foo] details." + + +def test_format_coloured_string() -> None: + """Test formetting colored.""" + match = MatchError( + message="message", + linenumber=1, + details=DETAILS, + filename=Lintable("filename.yml", content=""), + rule=rule, + ) + formatter.format(match) + + +def test_unicode_format_string() -> None: + """Test formatting unicode.""" + match = MatchError( + message="\U0001f427", + linenumber=1, + details=DETAILS, + filename=Lintable("filename.yml", content=""), + rule=rule, + ) + formatter.format(match) + + +def test_dict_format_line() -> None: + """Test formatting dictionary details.""" + match = MatchError( + message="xyz", + linenumber=1, + details={"hello": "world"}, # type: ignore + filename=Lintable("filename.yml", content=""), + rule=rule, + ) + formatter.format(match) diff --git a/test/test_formatter_base.py b/test/test_formatter_base.py new file mode 100644 index 0000000..3e444a3 --- /dev/null +++ b/test/test_formatter_base.py @@ -0,0 +1,66 @@ +"""Tests related to base formatter.""" +from __future__ import annotations + +from pathlib import Path +from typing import Any + +import pytest + +from ansiblelint.formatters import BaseFormatter + + +@pytest.mark.parametrize( + ("base_dir", "relative_path"), + ( + (None, True), + ("/whatever", False), + (Path("/whatever"), False), + ), +) +@pytest.mark.parametrize("path", ("/whatever/string", Path("/whatever/string"))) +def test_base_formatter_when_base_dir( + base_dir: Any, relative_path: bool, path: str +) -> None: + """Check that base formatter accepts relative pathlib and str.""" + # Given + base_formatter = BaseFormatter(base_dir, relative_path) # type: ignore + + # When + output_path = base_formatter._format_path(path) # pylint: disable=protected-access + + # Then + assert isinstance(output_path, (str, Path)) + # pylint: disable=protected-access + assert base_formatter._base_dir is None or isinstance( + base_formatter._base_dir, (str, Path) + ) + assert output_path == path + + +@pytest.mark.parametrize( + "base_dir", + ( + Path("/whatever"), + "/whatever", + ), +) +@pytest.mark.parametrize("path", ("/whatever/string", Path("/whatever/string"))) +def test_base_formatter_when_base_dir_is_given_and_relative_is_true( + path: str | Path, base_dir: str | Path +) -> None: + """Check that the base formatter equally accepts pathlib and str.""" + # Given + base_formatter = BaseFormatter(base_dir, True) # type: ignore + + # When + # pylint: disable=protected-access + output_path = base_formatter._format_path(path) + + # Then + assert isinstance(output_path, (str, Path)) + # pylint: disable=protected-access + assert isinstance(base_formatter._base_dir, (str, Path)) + assert output_path == Path(path).name + + +# vim: et:sw=4:syntax=python:ts=4: diff --git a/test/test_formatter_json.py b/test/test_formatter_json.py new file mode 100644 index 0000000..9e995aa --- 
/dev/null +++ b/test/test_formatter_json.py @@ -0,0 +1,131 @@ +"""Test the codeclimate JSON formatter.""" +from __future__ import annotations + +import json +import pathlib +import subprocess +import sys + +import pytest + +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.formatters import CodeclimateJSONFormatter +from ansiblelint.rules import AnsibleLintRule + + +class TestCodeclimateJSONFormatter: + """Unit test for CodeclimateJSONFormatter.""" + + rule = AnsibleLintRule() + matches: list[MatchError] = [] + formatter: CodeclimateJSONFormatter | None = None + + def setup_class(self) -> None: + """Set up few MatchError objects.""" + self.rule = AnsibleLintRule() + self.rule.id = "TCF0001" + self.rule.severity = "VERY_HIGH" + self.matches = [] + self.matches.append( + MatchError( + message="message", + linenumber=1, + details="hello", + filename=Lintable("filename.yml", content=""), + rule=self.rule, + ) + ) + self.matches.append( + MatchError( + message="message", + linenumber=2, + details="hello", + filename=Lintable("filename.yml", content=""), + rule=self.rule, + ) + ) + self.formatter = CodeclimateJSONFormatter( + pathlib.Path.cwd(), display_relative_path=True + ) + + def test_format_list(self) -> None: + """Test if the return value is a string.""" + assert isinstance(self.formatter, CodeclimateJSONFormatter) + assert isinstance(self.formatter.format_result(self.matches), str) + + def test_result_is_json(self) -> None: + """Test if returned string value is a JSON.""" + assert isinstance(self.formatter, CodeclimateJSONFormatter) + json.loads(self.formatter.format_result(self.matches)) + + def test_single_match(self) -> None: + """Test negative case. Only lists are allowed. Otherwise a RuntimeError will be raised.""" + assert isinstance(self.formatter, CodeclimateJSONFormatter) + with pytest.raises(RuntimeError): + self.formatter.format_result(self.matches[0]) # type: ignore + + def test_result_is_list(self) -> None: + """Test if the return JSON contains a list with a length of 2.""" + assert isinstance(self.formatter, CodeclimateJSONFormatter) + result = json.loads(self.formatter.format_result(self.matches)) + assert len(result) == 2 + + def test_validate_codeclimate_schema(self) -> None: + """Test if the returned JSON is a valid codeclimate report.""" + assert isinstance(self.formatter, CodeclimateJSONFormatter) + result = json.loads(self.formatter.format_result(self.matches)) + single_match = result[0] + assert "type" in single_match + assert single_match["type"] == "issue" + assert "check_name" in single_match + assert "categories" in single_match + assert isinstance(single_match["categories"], list) + assert "severity" in single_match + assert single_match["severity"] == "blocker" + assert "description" in single_match + assert "fingerprint" in single_match + assert "location" in single_match + assert "path" in single_match["location"] + assert single_match["location"]["path"] == self.matches[0].filename + assert "lines" in single_match["location"] + assert single_match["location"]["lines"]["begin"] == self.matches[0].linenumber + assert "positions" not in single_match["location"] + + def test_validate_codeclimate_schema_with_positions(self) -> None: + """Test if the returned JSON is a valid codeclimate report (containing 'positions' instead of 'lines').""" + assert isinstance(self.formatter, CodeclimateJSONFormatter) + result = json.loads( + self.formatter.format_result( + [ + MatchError( + message="message", + linenumber=1, 
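+ # Supplying a column makes the formatter emit "positions" instead of "lines".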
+ column=42, + details="hello", + filename=Lintable("filename.yml", content=""), + rule=self.rule, + ) + ] + ) + ) + assert result[0]["location"]["positions"]["begin"]["line"] == 1 + assert result[0]["location"]["positions"]["begin"]["column"] == 42 + assert "lines" not in result[0]["location"] + + +def test_code_climate_parsable_ignored() -> None: + """Test that -p option does not alter codeclimate format.""" + cmd = [ + sys.executable, + "-m", + "ansiblelint", + "-v", + "-p", + ] + file = "examples/playbooks/empty_playbook.yml" + result = subprocess.run([*cmd, file], check=False) + result2 = subprocess.run([*cmd, "-p", file], check=False) + + assert result.returncode == result2.returncode + assert result.stdout == result2.stdout diff --git a/test/test_formatter_sarif.py b/test/test_formatter_sarif.py new file mode 100644 index 0000000..7ef87b5 --- /dev/null +++ b/test/test_formatter_sarif.py @@ -0,0 +1,167 @@ +"""Test the codeclimate JSON formatter.""" +from __future__ import annotations + +import json +import os +import pathlib +import subprocess +import sys +from tempfile import NamedTemporaryFile + +import pytest + +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.formatters import SarifFormatter +from ansiblelint.rules import AnsibleLintRule + + +class TestSarifFormatter: + """Unit test for SarifFormatter.""" + + rule = AnsibleLintRule() + matches: list[MatchError] = [] + formatter: SarifFormatter | None = None + + def setup_class(self) -> None: + """Set up few MatchError objects.""" + self.rule = AnsibleLintRule() + self.rule.id = "TCF0001" + self.rule.severity = "VERY_HIGH" + self.rule.description = "This is the rule description." + self.rule.link = "https://rules/help#TCF0001" + self.rule.tags = ["tag1", "tag2"] + self.matches = [] + self.matches.append( + MatchError( + message="message", + linenumber=1, + column=10, + details="hello", + filename=Lintable("filename.yml", content=""), + rule=self.rule, + tag="yaml[test]", + ) + ) + self.matches.append( + MatchError( + message="message", + linenumber=2, + details="hello", + filename=Lintable("filename.yml", content=""), + rule=self.rule, + tag="yaml[test]", + ) + ) + self.formatter = SarifFormatter(pathlib.Path.cwd(), display_relative_path=True) + + def test_format_list(self) -> None: + """Test if the return value is a string.""" + assert isinstance(self.formatter, SarifFormatter) + assert isinstance(self.formatter.format_result(self.matches), str) + + def test_result_is_json(self) -> None: + """Test if returned string value is a JSON.""" + assert isinstance(self.formatter, SarifFormatter) + json.loads(self.formatter.format_result(self.matches)) + + def test_single_match(self) -> None: + """Test negative case. Only lists are allowed. 
Otherwise a RuntimeError will be raised.""" + assert isinstance(self.formatter, SarifFormatter) + with pytest.raises(RuntimeError): + self.formatter.format_result(self.matches[0]) # type: ignore + + def test_result_is_list(self) -> None: + """Test if the return SARIF object contains the results with length of 2.""" + assert isinstance(self.formatter, SarifFormatter) + sarif = json.loads(self.formatter.format_result(self.matches)) + assert len(sarif["runs"][0]["results"]) == 2 + + def test_validate_sarif_schema(self) -> None: + """Test if the returned JSON is a valid SARIF report.""" + assert isinstance(self.formatter, SarifFormatter) + sarif = json.loads(self.formatter.format_result(self.matches)) + assert sarif["$schema"] == SarifFormatter.SARIF_SCHEMA + assert sarif["version"] == SarifFormatter.SARIF_SCHEMA_VERSION + driver = sarif["runs"][0]["tool"]["driver"] + assert driver["name"] == SarifFormatter.TOOL_NAME + assert driver["informationUri"] == SarifFormatter.TOOL_URL + rules = driver["rules"] + assert len(rules) == 1 + assert rules[0]["id"] == self.matches[0].tag + assert rules[0]["name"] == self.matches[0].tag + assert rules[0]["shortDescription"]["text"] == self.matches[0].message + assert rules[0]["defaultConfiguration"]["level"] == "error" + assert rules[0]["help"]["text"] == self.matches[0].rule.description + assert rules[0]["properties"]["tags"] == self.matches[0].rule.tags + assert rules[0]["helpUri"] == self.rule.link + results = sarif["runs"][0]["results"] + assert len(results) == 2 + for i, result in enumerate(results): + assert result["ruleId"] == self.matches[i].tag + assert result["message"]["text"] == self.matches[0].message + assert ( + result["locations"][0]["physicalLocation"]["artifactLocation"]["uri"] + == self.matches[i].filename + ) + assert ( + result["locations"][0]["physicalLocation"]["artifactLocation"][ + "uriBaseId" + ] + == SarifFormatter.BASE_URI_ID + ) + assert ( + result["locations"][0]["physicalLocation"]["region"]["startLine"] + == self.matches[i].linenumber + ) + if self.matches[i].column: + assert ( + result["locations"][0]["physicalLocation"]["region"]["startColumn"] + == self.matches[i].column + ) + else: + assert ( + "startColumn" + not in result["locations"][0]["physicalLocation"]["region"] + ) + assert sarif["runs"][0]["originalUriBaseIds"][SarifFormatter.BASE_URI_ID]["uri"] + + +def test_sarif_parsable_ignored() -> None: + """Test that -p option does not alter SARIF format.""" + cmd = [ + sys.executable, + "-m", + "ansiblelint", + "-v", + "-p", + ] + file = "examples/playbooks/empty_playbook.yml" + result = subprocess.run([*cmd, file], check=False) + result2 = subprocess.run([*cmd, "-p", file], check=False) + + assert result.returncode == result2.returncode + assert result.stdout == result2.stdout + + +@pytest.mark.parametrize( + ("file", "return_code"), + ( + pytest.param("examples/playbooks/valid.yml", 0), + pytest.param("playbook.yml", 2), + ), +) +def test_sarif_file(file: str, return_code: int) -> None: + """Test ability to dump sarif file (--sarif-file).""" + with NamedTemporaryFile(mode="w", suffix=".sarif", prefix="output") as output_file: + cmd = [ + sys.executable, + "-m", + "ansiblelint", + "--sarif-file", + str(output_file.name), + ] + result = subprocess.run([*cmd, file], check=False, capture_output=True) + assert result.returncode == return_code + assert os.path.exists(output_file.name) + assert os.path.getsize(output_file.name) > 0 diff --git a/test/test_import_include_role.py b/test/test_import_include_role.py new file mode 
100644 index 0000000..5ecfa77 --- /dev/null +++ b/test/test_import_include_role.py @@ -0,0 +1,150 @@ +"""Tests related to role imports.""" +from __future__ import annotations + +from pathlib import Path + +import pytest +from _pytest.fixtures import SubRequest + +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + +ROLE_TASKS_MAIN = """\ +--- +- name: Shell instead of command + shell: echo hello world # noqa: fqcn no-free-form + changed_when: false +""" + +ROLE_TASKS_WORLD = """\ +--- +- ansible.builtin.debug: + msg: "this is a task without a name" +""" + +PLAY_IMPORT_ROLE = """\ +--- +- name: Test fixture + hosts: all + + tasks: + - name: Some import # noqa: fqcn + import_role: + name: test-role +""" + +PLAY_IMPORT_ROLE_FQCN = """\ +--- +- name: Test fixture + hosts: all + + tasks: + - name: Some import + ansible.builtin.import_role: + name: test-role +""" + +PLAY_IMPORT_ROLE_INLINE = """\ +--- +- name: Fixture + hosts: all + tasks: + - name: Some import + import_role: name=test-role # noqa: no-free-form fqcn +""" + +PLAY_INCLUDE_ROLE = """\ +--- +- name: Fixture + hosts: all + tasks: + - name: Some import + include_role: + name: test-role + tasks_from: world +""" + +PLAY_INCLUDE_ROLE_FQCN = """\ +--- +- name: Fixture + hosts: all + tasks: + - name: Some import + ansible.builtin.include_role: + name: test-role + tasks_from: world +""" + +PLAY_INCLUDE_ROLE_INLINE = """\ +--- +- name: Fixture + hosts: all + tasks: + - name: Some import + include_role: name=test-role tasks_from=world # noqa: no-free-form +""" + + +@pytest.fixture(name="playbook_path") +def fixture_playbook_path(request: SubRequest, tmp_path: Path) -> str: + """Create a reusable per-test role skeleton.""" + playbook_text = request.param + role_tasks_dir = tmp_path / "test-role" / "tasks" + role_tasks_dir.mkdir(parents=True) + (role_tasks_dir / "main.yml").write_text(ROLE_TASKS_MAIN) + (role_tasks_dir / "world.yml").write_text(ROLE_TASKS_WORLD) + play_path = tmp_path / "playbook.yml" + play_path.write_text(playbook_text) + return str(play_path) + + +@pytest.mark.parametrize( + ("playbook_path", "messages"), + ( + pytest.param( + PLAY_IMPORT_ROLE, + ["only when shell functionality is required", "All tasks should be named"], + id="IMPORT_ROLE", + ), + pytest.param( + PLAY_IMPORT_ROLE_FQCN, + ["only when shell functionality is required", "All tasks should be named"], + id="IMPORT_ROLE_FQCN", + ), + pytest.param( + PLAY_IMPORT_ROLE_INLINE, + ["only when shell functionality is require", "All tasks should be named"], + id="IMPORT_ROLE_INLINE", + ), + pytest.param( + PLAY_INCLUDE_ROLE, + ["only when shell functionality is require", "All tasks should be named"], + id="INCLUDE_ROLE", + ), + pytest.param( + PLAY_INCLUDE_ROLE_FQCN, + ["only when shell functionality is require", "All tasks should be named"], + id="INCLUDE_ROLE_FQCN", + ), + pytest.param( + PLAY_INCLUDE_ROLE_INLINE, + ["only when shell functionality is require", "All tasks should be named"], + id="INCLUDE_ROLE_INLINE", + ), + ), + indirect=("playbook_path",), +) +def test_import_role2( + default_rules_collection: RulesCollection, playbook_path: str, messages: list[str] +) -> None: + """Test that include_role digs deeper than import_role.""" + runner = Runner( + playbook_path, + rules=default_rules_collection, + skip_list=["fqcn[action-core]"], + ) + results = runner.run() + for message in messages: + assert message in str(results) + # Ensure no other unexpected messages are present + assert len(messages) == len(results), results diff 
--git a/test/test_import_playbook.py b/test/test_import_playbook.py new file mode 100644 index 0000000..66d8763 --- /dev/null +++ b/test/test_import_playbook.py @@ -0,0 +1,18 @@ +"""Test ability to import playbooks.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + + +def test_task_hook_import_playbook(default_rules_collection: RulesCollection) -> None: + """Assures import_playbook includes are recognized.""" + playbook_path = "examples/playbooks/playbook-parent.yml" + runner = Runner(playbook_path, rules=default_rules_collection) + results = runner.run() + + results_text = str(results) + assert len(runner.lintables) == 2 + assert len(results) == 2 + # Assures we detected the issues from imported playbook + assert "Commands should not change things" in results_text + assert "[name]" in results_text + assert "All tasks should be named" in results_text diff --git a/test/test_import_tasks.py b/test/test_import_tasks.py new file mode 100644 index 0000000..3254121 --- /dev/null +++ b/test/test_import_tasks.py @@ -0,0 +1,28 @@ +"""Test related to import of invalid files.""" +import pytest + +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + + +@pytest.mark.parametrize( + "playbook_path", + ( + pytest.param( + "examples/playbooks/test_import_with_conflicting_action_statements.yml", + id="0", + ), + pytest.param("examples/playbooks/test_import_with_malformed.yml", id="1"), + ), +) +def test_import_tasks( + default_rules_collection: RulesCollection, playbook_path: str +) -> None: + """Assures import_playbook includes are recognized.""" + runner = Runner(playbook_path, rules=default_rules_collection) + results = runner.run() + + assert len(runner.lintables) == 1 + assert len(results) == 1 + # Assures we detected the issues from imported file + assert results[0].rule.id == "syntax-check" diff --git a/test/test_include_miss_file_with_role.py b/test/test_include_miss_file_with_role.py new file mode 100644 index 0000000..6834758 --- /dev/null +++ b/test/test_include_miss_file_with_role.py @@ -0,0 +1,43 @@ +"""Tests related to inclusions.""" +import pytest +from _pytest.logging import LogCaptureFixture + +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + + +def test_cases_warning_message(default_rules_collection: RulesCollection) -> None: + """Test that including a non-existing file produces an error.""" + playbook_path = "examples/playbooks/play_miss_include.yml" + runner = Runner(playbook_path, rules=default_rules_collection) + results = runner.run() + + assert len(runner.lintables) == 3 + assert len(results) == 1 + assert "No such file or directory" in results[0].message + + +@pytest.mark.parametrize( + "playbook_path", + ( + pytest.param("examples/playbooks/test_include_inplace.yml", id="inplace"), + pytest.param("examples/playbooks/test_include_relative.yml", id="relative"), + ), +) +def test_cases_that_do_not_report( + playbook_path: str, + default_rules_collection: RulesCollection, + caplog: LogCaptureFixture, +) -> None: + """Test that relative inclusions are properly followed.""" + runner = Runner(playbook_path, rules=default_rules_collection) + result = runner.run() + noexist_message_count = 0 + + for record in caplog.records: + for msg in ("No such file or directory", "Couldn't open"): + if msg in str(record): + noexist_message_count += 1 + + assert noexist_message_count == 0 + assert len(result) == 0 diff --git a/test/test_internal_rules.py b/test/test_internal_rules.py new file 
mode 100644 index 0000000..b949238 --- /dev/null +++ b/test/test_internal_rules.py @@ -0,0 +1,8 @@
+"""Tests for internal rules."""
+from ansiblelint._internal.rules import BaseRule
+
+
+def test_base_rule_url() -> None:
+ """Test that rule URL is set to expected value."""
+ rule = BaseRule()
+ assert rule.url == "https://ansible-lint.readthedocs.io/rules/" diff --git a/test/test_lint_rule.py b/test/test_lint_rule.py new file mode 100644 index 0000000..2e13aa2 --- /dev/null +++ b/test/test_lint_rule.py @@ -0,0 +1,46 @@
+"""Tests for lint rule matching."""
+# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+
+from test.rules.fixtures import ematcher, raw_task
+
+import pytest
+
+from ansiblelint.file_utils import Lintable
+
+
+@pytest.fixture(name="lintable")
+def fixture_lintable() -> Lintable:
+ """Return a playbook Lintable for use in this file's tests."""
+ return Lintable("examples/playbooks/ematcher-rule.yml", kind="playbook")
+
+
+def test_rule_matching(lintable: Lintable) -> None:
+ """Test rule.matchlines() on a playbook."""
+ rule = ematcher.EMatcherRule()
+ matches = rule.matchlines(lintable)
+ assert len(matches) == 3
+
+
+def test_raw_rule_matching(lintable: Lintable) -> None:
+ """Test rule.matchtasks() on a playbook."""
+ rule = raw_task.RawTaskRule()
+ matches = rule.matchtasks(lintable)
+ assert len(matches) == 1 diff --git a/test/test_list_rules.py b/test/test_list_rules.py new file mode 100644 index 0000000..eab512b --- /dev/null +++ b/test/test_list_rules.py @@ -0,0 +1,76 @@
+"""Tests related to listing rules."""
+
+import os
+
+import pytest
+
+from ansiblelint.testing import run_ansible_lint
+
+
+def test_list_rules_includes_opt_in_rules() -> None:
+ """Checks that listing rules also includes the opt-in rules."""
+ # Piggyback off the .yamllint in the root of the repo, just for testing.
+ # We'll "override" it with the one in the fixture.
+ cwd = os.path.realpath( + os.path.join(os.path.dirname(os.path.realpath(__file__)), "..") + ) + fakerole = os.path.join("test", "fixtures", "list-rules-tests") + + result_list_rules = run_ansible_lint("-L", fakerole, cwd=cwd) + + assert ("opt-in" in result_list_rules.stdout) is True + + +@pytest.mark.parametrize( + ("result", "returncode", "format_string"), + ( + (False, 0, "brief"), + (False, 0, "full"), + (False, 0, "md"), + (True, 2, "json"), + (True, 2, "codeclimate"), + (True, 2, "quiet"), + (True, 2, "pep8"), + (True, 2, "foo"), + ), + ids=( + "plain", + "full", + "md", + "json", + "codeclimate", + "quiet", + "pep8", + "foo", + ), +) +def test_list_rules_with_format_option( + result: bool, returncode: int, format_string: str +) -> None: + """Checks that listing rules with format options works.""" + # Piggyback off the .yamllint in the root of the repo, just for testing. + # We'll "override" it with the one in the fixture. + cwd = os.path.realpath( + os.path.join(os.path.dirname(os.path.realpath(__file__)), "..") + ) + fakerole = os.path.join("test", "fixtures", "list-rules-tests") + + result_list_rules = run_ansible_lint("-f", format_string, "-L", fakerole, cwd=cwd) + + assert (f"invalid choice: '{format_string}'" in result_list_rules.stderr) is result + assert ("syntax-check" in result_list_rules.stdout) is not result + assert result_list_rules.returncode is returncode + + +def test_list_tags_includes_opt_in_rules() -> None: + """Checks that listing tags also includes the opt-in rules.""" + # Piggyback off the .yamllint in the root of the repo, just for testing. + # We'll "override" it with the one in the fixture. + cwd = os.path.realpath( + os.path.join(os.path.dirname(os.path.realpath(__file__)), "..") + ) + fakerole = os.path.join("test", "fixtures", "list-rules-tests") + + result_list_tags = run_ansible_lint("-L", fakerole, cwd=cwd) + + assert ("opt-in" in result_list_tags.stdout) is True diff --git a/test/test_load_failure.py b/test/test_load_failure.py new file mode 100644 index 0000000..c0e008a --- /dev/null +++ b/test/test_load_failure.py @@ -0,0 +1,13 @@ +"""Tests for LoadFailureRule.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + + +def test_load_failure_encoding(default_rules_collection: RulesCollection) -> None: + """Check that we fail when file encoding is wrong.""" + runner = Runner("examples/broken/encoding.j2", rules=default_rules_collection) + matches = runner.run() + assert len(matches) == 1, matches + assert matches[0].rule.id == "load-failure" + assert "'utf-8' codec can't decode byte" in matches[0].message + assert matches[0].tag == "load-failure[unicodedecodeerror]" diff --git a/test/test_loaders.py b/test/test_loaders.py new file mode 100644 index 0000000..d07c26e --- /dev/null +++ b/test/test_loaders.py @@ -0,0 +1,8 @@ +"""Tests for loaders submodule.""" +from ansiblelint.loaders import load_ignore_txt + + +def test_load_ignore_txt() -> None: + """Test load_ignore_txt.""" + result = load_ignore_txt(".ansible-lint-ignore") + assert result == {"playbook2.yml": {"foo-bar", "package-latest"}} diff --git a/test/test_local_content.py b/test/test_local_content.py new file mode 100644 index 0000000..8455aaf --- /dev/null +++ b/test/test_local_content.py @@ -0,0 +1,13 @@ +"""Test playbooks with local content.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + + +def test_local_collection(default_rules_collection: RulesCollection) -> None: + """Assures local collections are found.""" 
+ playbook_path = "test/local-content/test-collection.yml" + runner = Runner(playbook_path, rules=default_rules_collection) + results = runner.run() + + assert len(runner.lintables) == 1 + assert len(results) == 0 diff --git a/test/test_main.py b/test/test_main.py new file mode 100644 index 0000000..ae84f12 --- /dev/null +++ b/test/test_main.py @@ -0,0 +1,78 @@ +"""Tests related to ansiblelint.__main__ module.""" +import os +import shutil +import subprocess +import sys +import time +from pathlib import Path +from typing import Any + +import pytest + +from ansiblelint.config import get_version_warning + + +@pytest.mark.parametrize( + ("expected_warning"), + (False, True), + ids=("normal", "isolated"), +) +def test_call_from_outside_venv(expected_warning: bool) -> None: + """Asserts ability to be called w/ or w/o venv activation.""" + git_location = shutil.which("git") + assert git_location + + if expected_warning: + env = {"HOME": str(Path.home()), "PATH": str(os.path.dirname(git_location))} + else: + env = os.environ.copy() + + for v in ("COVERAGE_FILE", "COVERAGE_PROCESS_START"): + if v in os.environ: + env[v] = os.environ[v] + + py_path = os.path.dirname(sys.executable) + # Passing custom env prevents the process from inheriting PATH or other + # environment variables from the current process, so we emulate being + # called from outside the venv. + proc = subprocess.run( + [f"{py_path}/ansible-lint", "--version"], + check=False, + capture_output=True, + text=True, + env=env, + ) + assert proc.returncode == 0, proc + warning_found = "PATH altered to include" in proc.stderr + assert warning_found is expected_warning + + +@pytest.mark.parametrize( + ("ver_diff", "found", "check", "outlen"), + ( + ("v1.2.2", True, "pre-release", 1), + ("v1.2.3", False, "", 1), + ("v1.2.4", True, "new release", 2), + ), +) +def test_get_version_warning( + mocker: Any, ver_diff: str, found: bool, check: str, outlen: int +) -> None: + """Assert get_version_warning working as expected.""" + data = '{"html_url": "https://127.0.0.1", "tag_name": "' + f"{ver_diff}" + '"}' + # simulate cache file + mocker.patch("os.path.exists", return_value=True) + mocker.patch("os.path.getmtime", return_value=time.time()) + mocker.patch("builtins.open", mocker.mock_open(read_data=data)) + # overwrite ansible-lint version + mocker.patch("ansiblelint.config.__version__", "1.2.3") + # overwrite install method to custom one. This one will increase msg line count + # to easily detect unwanted call to it. 
+ mocker.patch("ansiblelint.config.guess_install_method", return_value="\n") + msg = get_version_warning() + + if not found: + assert msg == check + else: + assert check in msg + assert len(msg.split("\n")) == outlen diff --git a/test/test_matcherrror.py b/test/test_matcherrror.py new file mode 100644 index 0000000..041b7ce --- /dev/null +++ b/test/test_matcherrror.py @@ -0,0 +1,203 @@ +"""Tests for MatchError.""" + +import operator +from typing import Any, Callable + +import pytest + +from ansiblelint.errors import MatchError +from ansiblelint.file_utils import Lintable +from ansiblelint.rules.no_changed_when import CommandHasChangesCheckRule +from ansiblelint.rules.partial_become import BecomeUserWithoutBecomeRule + + +class DummyTestObject: + """A dummy object for equality tests.""" + + def __repr__(self) -> str: + """Return a dummy object representation for parametrize.""" + return f"{self.__class__.__name__}()" + + def __eq__(self, other: object) -> bool: + """Report the equality check failure with any object.""" + return False + + def __ne__(self, other: object) -> bool: + """Report the confirmation of inequality with any object.""" + return True + + +class DummySentinelTestObject: + """A dummy object for equality protocol tests with sentinel.""" + + def __eq__(self, other: object) -> bool: + """Return sentinel as result of equality check w/ anything.""" + return "EQ_SENTINEL" # type: ignore + + def __ne__(self, other: object) -> bool: + """Return sentinel as result of inequality check w/ anything.""" + return "NE_SENTINEL" # type: ignore + + def __lt__(self, other: object) -> bool: + """Return sentinel as result of less than check w/ anything.""" + return "LT_SENTINEL" # type: ignore + + def __gt__(self, other: object) -> bool: + """Return sentinel as result of greater than chk w/ anything.""" + return "GT_SENTINEL" # type: ignore + + +@pytest.mark.parametrize( + ("left_match_error", "right_match_error"), + ( + (MatchError("foo"), MatchError("foo")), + (MatchError("a", details="foo"), MatchError("a", details="foo")), + ), +) +def test_matcherror_compare( + left_match_error: MatchError, right_match_error: MatchError +) -> None: + """Check that MatchError instances with similar attrs are equivalent.""" + assert left_match_error == right_match_error + + +def test_matcherror_invalid() -> None: + """Ensure that MatchError requires message or rule.""" + expected_err = ( + r"^MatchError\(\) missing a required argument: one of 'message' or 'rule'$" + ) + with pytest.raises(TypeError, match=expected_err): + raise MatchError() + + +@pytest.mark.parametrize( + ("left_match_error", "right_match_error"), + ( + # sorting by message + (MatchError("z"), MatchError("a")), + # filenames takes priority in sorting + ( + MatchError("a", filename=Lintable("b", content="")), + MatchError("a", filename=Lintable("a", content="")), + ), + # rule id partial-become > rule id no-changed-when + ( + MatchError(rule=BecomeUserWithoutBecomeRule()), + MatchError(rule=CommandHasChangesCheckRule()), + ), + # details are taken into account + (MatchError("a", details="foo"), MatchError("a", details="bar")), + # columns are taken into account + (MatchError("a", column=3), MatchError("a", column=1)), + (MatchError("a", column=3), MatchError("a")), + ), +) +class TestMatchErrorCompare: + """Test the comparison of MatchError instances.""" + + @staticmethod + def test_match_error_less_than( + left_match_error: MatchError, right_match_error: MatchError + ) -> None: + """Check 'less than' protocol implementation in 
MatchError.""" + assert right_match_error < left_match_error + + @staticmethod + def test_match_error_greater_than( + left_match_error: MatchError, right_match_error: MatchError + ) -> None: + """Check 'greater than' protocol implementation in MatchError.""" + assert left_match_error > right_match_error + + @staticmethod + def test_match_error_not_equal( + left_match_error: MatchError, right_match_error: MatchError + ) -> None: + """Check 'not equals' protocol implementation in MatchError.""" + assert left_match_error != right_match_error + + +@pytest.mark.parametrize( + "other", + ( + None, + "foo", + 42, + Exception("foo"), + ), + ids=repr, +) +@pytest.mark.parametrize( + ("operation", "operator_char"), + ( + pytest.param(operator.le, "<=", id="<="), + pytest.param(operator.gt, ">", id=">"), + ), +) +def test_matcherror_compare_no_other_fallback( + other: Any, operation: Callable[..., bool], operator_char: str +) -> None: + """Check that MatchError comparison with other types causes TypeError.""" + expected_error = ( + r"^(" + r"unsupported operand type\(s\) for {operator!s}:|" + r"'{operator!s}' not supported between instances of" + r") 'MatchError' and '{other_type!s}'$".format( + other_type=type(other).__name__, operator=operator_char + ) + ) + with pytest.raises(TypeError, match=expected_error): + operation(MatchError("foo"), other) + + +@pytest.mark.parametrize( + "other", + ( + None, + "foo", + 42, + Exception("foo"), + DummyTestObject(), + ), + ids=repr, +) +@pytest.mark.parametrize( + ("operation", "expected_value"), + ( + (operator.eq, False), + (operator.ne, True), + ), + ids=("==", "!="), +) +def test_matcherror_compare_with_other_fallback( + other: object, + operation: Callable[..., bool], + expected_value: bool, +) -> None: + """Check that MatchError comparison runs other types fallbacks.""" + assert operation(MatchError("foo"), other) is expected_value + + +@pytest.mark.parametrize( + ("operation", "expected_value"), + ( + (operator.eq, "EQ_SENTINEL"), + (operator.ne, "NE_SENTINEL"), + # NOTE: these are swapped because when we do `x < y`, and `x.__lt__(y)` + # NOTE: returns `NotImplemented`, Python will reverse the check into + # NOTE: `y > x`, and so `y.__gt__(x) is called. + # Ref: https://docs.python.org/3/reference/datamodel.html#object.__lt__ + (operator.lt, "GT_SENTINEL"), + (operator.gt, "LT_SENTINEL"), + ), + ids=("==", "!=", "<", ">"), +) +def test_matcherror_compare_with_dummy_sentinel( + operation: Callable[..., bool], expected_value: str +) -> None: + """Check that MatchError comparison runs other types fallbacks.""" + dummy_obj = DummySentinelTestObject() + # NOTE: This assertion abuses the CPython property to cache short string + # NOTE: objects because the identity check is more precise and we don't + # NOTE: want extra operator protocol methods to influence the test. 
+ assert operation(MatchError("foo"), dummy_obj) is expected_value # type: ignore diff --git a/test/test_mockings.py b/test/test_mockings.py new file mode 100644 index 0000000..304cb6c --- /dev/null +++ b/test/test_mockings.py @@ -0,0 +1,13 @@ +"""Test mockings module.""" +import pytest + +from ansiblelint._mockings import _make_module_stub +from ansiblelint.constants import INVALID_CONFIG_RC + + +def test_make_module_stub() -> None: + """Test make module stub.""" + with pytest.raises(SystemExit) as exc: + _make_module_stub("") + assert exc.type == SystemExit + assert exc.value.code == INVALID_CONFIG_RC diff --git a/test/test_profiles.py b/test/test_profiles.py new file mode 100644 index 0000000..6f7c34f --- /dev/null +++ b/test/test_profiles.py @@ -0,0 +1,50 @@ +"""Tests for the --profile feature.""" +import platform +import subprocess +import sys + +from _pytest.capture import CaptureFixture + +from ansiblelint.rules import RulesCollection, filter_rules_with_profile +from ansiblelint.rules.risky_shell_pipe import ShellWithoutPipefail + + +def test_profile_min() -> None: + """Asserts our ability to unload rules based on profile.""" + collection = RulesCollection() + assert len(collection.rules) == 4, "Unexpected number of implicit rules." + # register one extra rule that we know not to be part of "min" profile + + collection.register(ShellWithoutPipefail()) + assert len(collection.rules) == 5, "Failed to register new rule." + + filter_rules_with_profile(collection.rules, "min") + assert ( + len(collection.rules) == 3 + ), "Failed to unload rule that is not part of 'min' profile." + + +def test_profile_listing(capfd: CaptureFixture[str]) -> None: + """Test that run without arguments it will detect and lint the entire repository.""" + cmd = [ + sys.executable, + "-m", + "ansiblelint", + "-P", + ] + result = subprocess.run(cmd, check=False).returncode + assert result == 0 + + out, err = capfd.readouterr() + + # Confirmation that it runs in auto-detect mode + assert "command-instead-of-module" in out + if sys.version_info < (3, 9): + assert "ansible-lint is no longer tested" in err + else: + # On WSL we might see this warning on stderr: + # [WARNING]: Ansible is being run in a world writable directory + # WSL2 has "WSL2" in platform name but WSL1 has "microsoft": + platform_name = platform.platform().lower() + if all(word not in platform_name for word in ["wsl", "microsoft"]): + assert err == "", platform_name diff --git a/test/test_progressive.py b/test/test_progressive.py new file mode 100644 index 0000000..805291b --- /dev/null +++ b/test/test_progressive.py @@ -0,0 +1,93 @@ +"""Test for the --progressive mode.""" +from __future__ import annotations + +import json +import subprocess +import sys +from pathlib import Path + +from ansiblelint.constants import GIT_CMD +from ansiblelint.file_utils import cwd + +FAULTY_PLAYBOOK = """--- +- name: faulty + hosts: localhost + tasks: + - name: hello + debug: + msg: world +""" + +CORRECT_PLAYBOOK = """--- +- name: Correct + hosts: localhost + tasks: + - name: Hello + ansible.builtin.debug: + msg: world +""" + + +def git_init() -> None: + """Init temporary git repository.""" + subprocess.run([*GIT_CMD, "init", "--initial-branch=main"], check=True) + subprocess.run([*GIT_CMD, "config", "user.email", "test@example.com"], check=True) + subprocess.run([*GIT_CMD, "config", "user.name", "test"], check=True) + + +def git_commit(filename: Path, content: str) -> None: + """Create and commit a file.""" + filename.write_text(content) + subprocess.run([*GIT_CMD, 
"add", filename], check=True) + subprocess.run( + [ + *GIT_CMD, + "commit", + "--no-gpg-sign", + "-a", + "-m", + f"Commit {filename}", + ], + check=True, + ) + + +def run_lint(cmd: list[str]) -> subprocess.CompletedProcess[str]: + """Run ansible-lint.""" + # pylint: disable=subprocess-run-check + return subprocess.run( + cmd, + capture_output=True, + text=True, + ) + + +def test_validate_progressive_mode_json_output(tmp_path: Path) -> None: + """Test that covers the following scenarios for progressive mode. + + 1. JSON output is valid in quiet and verbose modes + 2. New files are correctly handled whether lintable paths are passed or not + 3. Regression is not reported when the last commit doesn't add any new violations + """ + cmd = [ + sys.executable, + "-m", + "ansiblelint", + "--progressive", + "-f", + "json", + ] + with cwd(tmp_path): + git_init() + git_commit(tmp_path / "README.md", "pytest") + git_commit(tmp_path / "playbook-faulty.yml", FAULTY_PLAYBOOK) + cmd.append("-q") + res = run_lint(cmd) + assert res.returncode == 2 + json.loads(res.stdout) + + git_commit(tmp_path / "playbook-correct.yml", CORRECT_PLAYBOOK) + cmd.extend(["-vv", "playbook-correct.yml"]) + res = run_lint(cmd) + assert res.returncode == 0 + json.loads(res.stdout) diff --git a/test/test_rule_properties.py b/test/test_rule_properties.py new file mode 100644 index 0000000..7db3afd --- /dev/null +++ b/test/test_rule_properties.py @@ -0,0 +1,16 @@ +"""Tests related to rule properties.""" +from ansiblelint.rules import RulesCollection + + +def test_severity_valid(default_rules_collection: RulesCollection) -> None: + """Test that rules collection only has allow-listed severities.""" + valid_severity_values = [ + "VERY_HIGH", + "HIGH", + "MEDIUM", + "LOW", + "VERY_LOW", + "INFO", + ] + for rule in default_rules_collection: + assert rule.severity in valid_severity_values diff --git a/test/test_rules_collection.py b/test/test_rules_collection.py new file mode 100644 index 0000000..60a2b83 --- /dev/null +++ b/test/test_rules_collection.py @@ -0,0 +1,170 @@ +"""Tests for rule collection class.""" +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+from __future__ import annotations + +import collections +import os +import re + +import pytest + +from ansiblelint.config import options +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import RulesCollection +from ansiblelint.testing import run_ansible_lint + + +@pytest.fixture(name="test_rules_collection") +def fixture_test_rules_collection() -> RulesCollection: + """Create a shared rules collection test instance.""" + return RulesCollection([os.path.abspath("./test/rules/fixtures")]) + + +@pytest.fixture(name="ematchtestfile") +def fixture_ematchtestfile() -> Lintable: + """Produce a test lintable with an id violation.""" + return Lintable("examples/playbooks/ematcher-rule.yml", kind="playbook") + + +@pytest.fixture(name="bracketsmatchtestfile") +def fixture_bracketsmatchtestfile() -> Lintable: + """Produce a test lintable with matching brackets.""" + return Lintable("examples/playbooks/bracketsmatchtest.yml", kind="playbook") + + +def test_load_collection_from_directory(test_rules_collection: RulesCollection) -> None: + """Test that custom rules extend the default ones.""" + # two detected rules plus the internal ones + assert len(test_rules_collection) == 7 + + +def test_run_collection( + test_rules_collection: RulesCollection, ematchtestfile: Lintable +) -> None: + """Test that default rules match pre-meditated violations.""" + matches = test_rules_collection.run(ematchtestfile) + assert len(matches) == 4 # 3 occurrences of BANNED using TEST0001 + 1 for raw-task + assert matches[0].linenumber == 3 + + +def test_tags( + test_rules_collection: RulesCollection, + ematchtestfile: Lintable, + bracketsmatchtestfile: Lintable, +) -> None: + """Test that tags are treated as skip markers.""" + matches = test_rules_collection.run(ematchtestfile, tags={"test1"}) + assert len(matches) == 3 + matches = test_rules_collection.run(ematchtestfile, tags={"test2"}) + assert len(matches) == 0 + matches = test_rules_collection.run(bracketsmatchtestfile, tags={"test1"}) + assert len(matches) == 0 + matches = test_rules_collection.run(bracketsmatchtestfile, tags={"test2"}) + assert len(matches) == 2 + + +def test_skip_tags( + test_rules_collection: RulesCollection, + ematchtestfile: Lintable, + bracketsmatchtestfile: Lintable, +) -> None: + """Test that tags can be skipped.""" + matches = test_rules_collection.run(ematchtestfile, skip_list=["test1", "test3"]) + assert len(matches) == 0 + matches = test_rules_collection.run(ematchtestfile, skip_list=["test2", "test3"]) + assert len(matches) == 3 + matches = test_rules_collection.run(bracketsmatchtestfile, skip_list=["test1"]) + assert len(matches) == 2 + matches = test_rules_collection.run(bracketsmatchtestfile, skip_list=["test2"]) + assert len(matches) == 0 + + +def test_skip_id( + test_rules_collection: RulesCollection, + ematchtestfile: Lintable, + bracketsmatchtestfile: Lintable, +) -> None: + """Check that skipping valid IDs excludes their violations.""" + matches = test_rules_collection.run( + ematchtestfile, skip_list=["TEST0001", "raw-task"] + ) + assert len(matches) == 0 + matches = test_rules_collection.run( + ematchtestfile, skip_list=["TEST0002", "raw-task"] + ) + assert len(matches) == 3 + matches = test_rules_collection.run(bracketsmatchtestfile, skip_list=["TEST0001"]) + assert len(matches) == 2 + matches = test_rules_collection.run(bracketsmatchtestfile, skip_list=["TEST0002"]) + assert len(matches) == 0 + + +def test_skip_non_existent_id( + test_rules_collection: RulesCollection, ematchtestfile: Lintable +) -> None: + 
"""Check that skipping invalid IDs changes nothing.""" + matches = test_rules_collection.run(ematchtestfile, skip_list=["DOESNOTEXIST"]) + assert len(matches) == 4 + + +def test_no_duplicate_rule_ids() -> None: + """Check that rules of the collection don't have duplicate IDs.""" + real_rules = RulesCollection([os.path.abspath("./src/ansiblelint/rules")]) + rule_ids = [rule.id for rule in real_rules] + assert not any(y > 1 for y in collections.Counter(rule_ids).values()) + + +def test_rich_rule_listing() -> None: + """Test that rich list format output is rendered as a table. + + This check also offers the contract of having rule id, short and long + descriptions in the console output. + """ + rules_path = os.path.abspath("./test/rules/fixtures") + result = run_ansible_lint("-r", rules_path, "-f", "full", "-L") + assert result.returncode == 0 + + for rule in RulesCollection([rules_path]): + assert rule.id in result.stdout + assert rule.shortdesc in result.stdout + # description could wrap inside table, so we do not check full length + assert rule.description[:30] in result.stdout + + +def test_rules_id_format() -> None: + """Assure all our rules have consistent format.""" + rule_id_re = re.compile("^[a-z-]{4,30}$") + # options.enable_list = ["no-same-owner", "no-log-password", "no-same-owner"] + rules = RulesCollection( + [os.path.abspath("./src/ansiblelint/rules")], options=options, conditional=False + ) + keys: set[str] = set() + for rule in rules: + assert rule_id_re.match( + rule.id + ), f"Rule id {rule.id} did not match our required format." + keys.add(rule.id) + assert ( + rule.help != "" or rule.description or rule.__doc__ + ), f"Rule {rule.id} must have at least one of: .help, .description, .__doc__" + assert "yaml" in keys, "yaml rule is missing" + assert len(rules) == 50 # update this number when adding new rules! + assert len(keys) == len(rules), "Duplicate rule ids?" diff --git a/test/test_runner.py b/test/test_runner.py new file mode 100644 index 0000000..0cd1c77 --- /dev/null +++ b/test/test_runner.py @@ -0,0 +1,159 @@ +"""Tests for runner submodule.""" +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+from __future__ import annotations + +import os +from typing import Any + +import pytest + +from ansiblelint import formatters +from ansiblelint.file_utils import Lintable, abspath +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + +LOTS_OF_WARNINGS_PLAYBOOK = abspath( + "examples/playbooks/lots_of_warnings.yml", os.getcwd() +) + + +@pytest.mark.parametrize( + ("playbook", "exclude", "length"), + ( + pytest.param("examples/playbooks/nomatchestest.yml", [], 0, id="nomatchestest"), + pytest.param("examples/playbooks/unicode.yml", [], 1, id="unicode"), + pytest.param( + LOTS_OF_WARNINGS_PLAYBOOK, + [LOTS_OF_WARNINGS_PLAYBOOK], + 0, + id="lots_of_warnings", + ), + pytest.param("examples/playbooks/become.yml", [], 0, id="become"), + pytest.param( + "examples/playbooks/contains_secrets.yml", [], 0, id="contains_secrets" + ), + ), +) +def test_runner( + default_rules_collection: RulesCollection, + playbook: str, + exclude: list[str], + length: int, +) -> None: + """Test that runner can go through any corner cases.""" + runner = Runner(playbook, rules=default_rules_collection, exclude_paths=exclude) + + matches = runner.run() + + assert len(matches) == length + + +def test_runner_exclude_paths(default_rules_collection: RulesCollection) -> None: + """Test that exclude paths do work.""" + runner = Runner( + "examples/playbooks/example.yml", + rules=default_rules_collection, + exclude_paths=["examples/"], + ) + + matches = runner.run() + assert len(matches) == 0 + + +@pytest.mark.parametrize(("exclude_path"), ("**/playbooks/*.yml",)) +def test_runner_exclude_globs( + default_rules_collection: RulesCollection, exclude_path: str +) -> None: + """Test that globs work.""" + runner = Runner( + "examples/playbooks", + rules=default_rules_collection, + exclude_paths=[exclude_path], + ) + + matches = runner.run() + # we expect to find one error from the only .yaml file we have there. 
+ assert len(matches) == 1
+
+
+@pytest.mark.parametrize(
+ ("formatter_cls"),
+ (
+ pytest.param(formatters.Formatter, id="Formatter-plain"),
+ pytest.param(formatters.ParseableFormatter, id="ParseableFormatter-colored"),
+ pytest.param(formatters.QuietFormatter, id="QuietFormatter-colored"),
+ pytest.param(formatters.Formatter, id="Formatter-colored"),
+ ),
+)
+def test_runner_unicode_format(
+ default_rules_collection: RulesCollection,
+ formatter_cls: type[formatters.BaseFormatter[Any]],
+) -> None:
+ """Check that all formatters are unicode-friendly."""
+ formatter = formatter_cls(os.getcwd(), display_relative_path=True)
+ runner = Runner(
+ Lintable("examples/playbooks/unicode.yml", kind="playbook"),
+ rules=default_rules_collection,
+ )
+
+ matches = runner.run()
+
+ formatter.format(matches[0])
+
+
+@pytest.mark.parametrize("directory_name", ("test/", os.path.abspath("test")))
+def test_runner_with_directory(
+ default_rules_collection: RulesCollection, directory_name: str
+) -> None:
+ """Check that runner detects a directory as a role."""
+ runner = Runner(directory_name, rules=default_rules_collection)
+
+ expected = Lintable(name=directory_name, kind="role")
+ assert expected in runner.lintables
+
+
+def test_files_not_scanned_twice(default_rules_collection: RulesCollection) -> None:
+ """Ensure that lintables aren't double-checked."""
+ checked_files: set[Lintable] = set()
+
+ filename = os.path.abspath("examples/playbooks/common-include-1.yml")
+ runner = Runner(
+ filename,
+ rules=default_rules_collection,
+ verbosity=0,
+ checked_files=checked_files,
+ )
+ run1 = runner.run()
+ assert len(runner.checked_files) == 2
+ assert len(run1) == 1
+
+ filename = os.path.abspath("examples/playbooks/common-include-2.yml")
+ runner = Runner(
+ filename,
+ rules=default_rules_collection,
+ verbosity=0,
+ checked_files=checked_files,
+ )
+ run2 = runner.run()
+ assert len(runner.checked_files) == 3
+ # This second run should return 0 because the included file was already
+ # processed and added to checked_files, which acts like a bypass list.
+ assert len(run2) == 0 diff --git a/test/test_schemas.py b/test/test_schemas.py new file mode 100644 index 0000000..5c89c11 --- /dev/null +++ b/test/test_schemas.py @@ -0,0 +1,99 @@
+"""Test schemas modules."""
+import logging
+import subprocess
+import sys
+import urllib
+from time import sleep
+from typing import Any
+from unittest.mock import DEFAULT, MagicMock, patch
+
+import pytest
+
+from ansiblelint.file_utils import Lintable
+from ansiblelint.rules import RulesCollection
+from ansiblelint.runner import Runner
+from ansiblelint.schemas import refresh_schemas, validate_file_schema
+
+
+def test_refresh_schemas() -> None:
+ """Test for schema update skip."""
+ # This is written as a single test in order to avoid concurrency issues,
+ # which caused random issues on CI when the two tests run in parallel
+ # and/or in a different order.
+ assert refresh_schemas(min_age_seconds=3600 * 24 * 365 * 10) == 0 + sleep(1) + # this should disable the cache and force an update + assert refresh_schemas(min_age_seconds=0) == 1 + sleep(1) + # should be cached now + assert refresh_schemas(min_age_seconds=10) == 0 + + +@pytest.mark.parametrize( + ("file", "expected_tags"), + ( + pytest.param( + "examples/changelogs/changelog.yaml", ["schema[changelog]"], id="changelog" + ), + ), +) +def test_schema( + default_rules_collection: RulesCollection, + file: str, + expected_tags: list[str], +) -> None: + """Test that runner can go through any corner cases.""" + runner = Runner(file, rules=default_rules_collection) + matches = runner.run() + + assert len(matches) == len(expected_tags) + for i, match in enumerate(matches): + assert match.tag == expected_tags[i] + + +def urlopen_side_effect(*_args: Any, **kwargs: Any) -> DEFAULT: + """Actual test that timeout parameter is defined.""" + assert "timeout" in kwargs + assert kwargs["timeout"] > 0 + return DEFAULT + + +@patch("urllib.request") +def test_requests_uses_timeout(mock_request: MagicMock) -> None: + """Test that schema refresh uses timeout.""" + mock_request.urlopen.side_effect = urlopen_side_effect + refresh_schemas(min_age_seconds=0) + mock_request.urlopen.assert_called() + + +@patch("urllib.request") +def test_request_timeouterror_handling( + mock_request: MagicMock, caplog: pytest.LogCaptureFixture +) -> None: + """Test that schema refresh can handle time out errors.""" + error_msg = "Simulating handshake operation time out." + mock_request.urlopen.side_effect = urllib.error.URLError(TimeoutError(error_msg)) + with caplog.at_level(logging.DEBUG): + assert refresh_schemas(min_age_seconds=0) == 1 + mock_request.urlopen.assert_called() + assert "Skipped schema refresh due to unexpected exception: " in caplog.text + assert error_msg in caplog.text + + +def test_schema_refresh_cli() -> None: + """Ensure that we test the cli schema refresh command.""" + proc = subprocess.run( + [sys.executable, "-m", "ansiblelint.schemas"], + check=False, + capture_output=True, + text=True, + ) + assert proc.returncode == 0 + + +def test_validate_file_schema() -> None: + """Test file schema validation failure on unknown file kind.""" + lintable = Lintable("foo.bar", kind="") + result = validate_file_schema(lintable) + assert len(result) == 1, result + assert "Unable to find JSON Schema" in result[0] diff --git a/test/test_skip_import_playbook.py b/test/test_skip_import_playbook.py new file mode 100644 index 0000000..8e333fe --- /dev/null +++ b/test/test_skip_import_playbook.py @@ -0,0 +1,48 @@ +"""Test related to skipping import_playbook.""" +from pathlib import Path + +import pytest + +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + +IMPORTED_PLAYBOOK = """\ +--- +- name: Fixture + hosts: all + tasks: + - name: Success # noqa: no-free-form + ansible.builtin.fail: msg="fail" + when: false +""" + +MAIN_PLAYBOOK = """\ +--- +- name: Fixture + hosts: all + + tasks: + - name: Should be shell # noqa: command-instead-of-shell no-changed-when no-free-form + ansible.builtin.shell: echo lol + +- name: Should not be imported + import_playbook: imported_playbook.yml +""" + + +@pytest.fixture(name="playbook") +def fixture_playbook(tmp_path: Path) -> str: + """Create a reusable per-test playbook.""" + playbook_path = tmp_path / "playbook.yml" + playbook_path.write_text(MAIN_PLAYBOOK) + (tmp_path / "imported_playbook.yml").write_text(IMPORTED_PLAYBOOK) + return str(playbook_path) + + 
+def test_skip_import_playbook( + default_rules_collection: RulesCollection, playbook: str +) -> None: + """Verify that a playbook import is skipped after a failure.""" + runner = Runner(playbook, rules=default_rules_collection) + results = runner.run() + assert len(results) == 0 diff --git a/test/test_skip_inside_yaml.py b/test/test_skip_inside_yaml.py new file mode 100644 index 0000000..d90c45d --- /dev/null +++ b/test/test_skip_inside_yaml.py @@ -0,0 +1,75 @@ +"""Tests related to use of inline noqa.""" +import pytest + +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner +from ansiblelint.testing import RunFromText, run_ansible_lint + +ROLE_TASKS = """\ +--- +- ansible.builtin.debug: + msg: this should fail linting due lack of name +- ansible.builtin.debug: # noqa: unnamed-task + msg: this should pass due to noqa comment +""" + +ROLE_TASKS_WITH_BLOCK = """\ +--- +- name: Bad git 1 # noqa: latest[git] + action: ansible.builtin.git a=b c=d +- name: Bad git 2 + action: ansible.builtin.git a=b c=d +- name: Block with rescue and always section + block: + - name: Bad git 3 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 4 + action: ansible.builtin.git a=b c=d + rescue: + - name: Bad git 5 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 6 + action: ansible.builtin.git a=b c=d + always: + - name: Bad git 7 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 8 + action: ansible.builtin.git a=b c=d +""" + + +def test_role_tasks(default_text_runner: RunFromText) -> None: + """Check that role tasks can contain skips.""" + results = default_text_runner.run_role_tasks_main(ROLE_TASKS) + assert len(results) == 1, results + assert results[0].linenumber == 2 + assert results[0].tag == "name[missing]" + assert results[0].rule.id == "name" + + +def test_role_tasks_with_block(default_text_runner: RunFromText) -> None: + """Check that blocks in role tasks can contain skips.""" + results = default_text_runner.run_role_tasks_main(ROLE_TASKS_WITH_BLOCK) + assert len(results) == 12 + + +@pytest.mark.parametrize( + ("lintable", "expected"), + (pytest.param("examples/playbooks/test_skip_inside_yaml.yml", 10, id="yaml"),), +) +def test_inline_skips( + default_rules_collection: RulesCollection, lintable: str, expected: int +) -> None: + """Check that playbooks can contain skips.""" + results = Runner(lintable, rules=default_rules_collection).run() + + assert len(results) == expected + + +def test_role_meta() -> None: + """Test running from inside meta folder.""" + role_path = "examples/roles/meta_noqa" + + result = run_ansible_lint("-v", role_path) + assert len(result.stdout) == 0 + assert result.returncode == 0 diff --git a/test/test_skip_playbook_items.py b/test/test_skip_playbook_items.py new file mode 100644 index 0000000..06729af --- /dev/null +++ b/test/test_skip_playbook_items.py @@ -0,0 +1,119 @@ +"""Tests related to use of noqa inside playbooks.""" +import pytest + +from ansiblelint.testing import RunFromText + +PLAYBOOK_PRE_TASKS = """\ +--- +- name: Fixture + hosts: all + tasks: + - name: Bad git 1 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 2 + action: ansible.builtin.git a=b c=d + pre_tasks: + - name: Bad git 3 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 4 + action: ansible.builtin.git a=b c=d +""" + +PLAYBOOK_POST_TASKS = """\ +--- +- name: Fixture + hosts: all + tasks: + - name: Bad git 1 # noqa: latest[git] + action: 
ansible.builtin.git a=b c=d + - name: Bad git 2 + action: ansible.builtin.git a=b c=d + post_tasks: + - name: Bad git 3 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 4 + action: ansible.builtin.git a=b c=d +""" + +PLAYBOOK_HANDLERS = """\ +--- +- name: Fixture + hosts: all + tasks: + - name: Bad git 1 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 2 + action: ansible.builtin.git a=b c=d + handlers: + - name: Bad git 3 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 4 + action: ansible.builtin.git a=b c=d +""" + +PLAYBOOK_TWO_PLAYS = """\ +--- +- name: Fixture + hosts: all + tasks: + - name: Bad git 1 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 2 + action: ansible.builtin.git a=b c=d + +- name: Fixture 2 + hosts: all + tasks: + - name: Bad git 3 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 4 + action: ansible.builtin.git a=b c=d +""" + +PLAYBOOK_WITH_BLOCK = """\ +--- +- name: Fixture + hosts: all + tasks: + - name: Bad git 1 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 2 + action: ansible.builtin.git a=b c=d + - name: Block with rescue and always section + block: + - name: Bad git 3 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 4 + action: ansible.builtin.git a=b c=d + rescue: + - name: Bad git 5 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 6 + action: ansible.builtin.git a=b c=d + always: + - name: Bad git 7 # noqa: latest[git] + action: ansible.builtin.git a=b c=d + - name: Bad git 8 + action: ansible.builtin.git a=b c=d +""" + + +@pytest.mark.parametrize( + ("playbook", "length"), + ( + pytest.param(PLAYBOOK_PRE_TASKS, 6, id="PRE_TASKS"), + pytest.param(PLAYBOOK_POST_TASKS, 6, id="POST_TASKS"), + pytest.param(PLAYBOOK_HANDLERS, 6, id="HANDLERS"), + pytest.param(PLAYBOOK_TWO_PLAYS, 6, id="TWO_PLAYS"), + pytest.param(PLAYBOOK_WITH_BLOCK, 12, id="WITH_BLOCK"), + ), +) +def test_pre_tasks( + default_text_runner: RunFromText, playbook: str, length: int +) -> None: + """Check that skipping is possible in different playbook parts.""" + # When + results = default_text_runner.run_playbook(playbook) + + # Then + assert len(results) == length diff --git a/test/test_skiputils.py b/test/test_skiputils.py new file mode 100644 index 0000000..5aff266 --- /dev/null +++ b/test/test_skiputils.py @@ -0,0 +1,230 @@ +"""Validate ansiblelint.skip_utils.""" +from __future__ import annotations + +from pathlib import Path +from typing import TYPE_CHECKING, Any + +import pytest + +from ansiblelint.constants import SKIPPED_RULES_KEY +from ansiblelint.file_utils import Lintable +from ansiblelint.skip_utils import ( + append_skipped_rules, + get_rule_skips_from_line, + is_nested_task, +) +from ansiblelint.testing import RunFromText + +if TYPE_CHECKING: + from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject + +PLAYBOOK_WITH_NOQA = """\ +--- +- name: Fixture + hosts: all + vars: + SOME_VAR_NOQA: "Foo" # noqa: var-naming + SOME_VAR: "Bar" + tasks: + - name: "Set the SOME_OTHER_VAR" + ansible.builtin.set_fact: + SOME_OTHER_VAR_NOQA: "Baz" # noqa: var-naming + SOME_OTHER_VAR: "Bat" +""" + + +@pytest.mark.parametrize( + ("line", "expected"), + ( + ("foo # noqa: bar", "bar"), + ("foo # noqa bar", "bar"), + ), +) +def test_get_rule_skips_from_line(line: str, expected: str) -> None: + """Validate get_rule_skips_from_line.""" + v = get_rule_skips_from_line(line) + assert v == 
[expected] + + +def test_playbook_noqa(default_text_runner: RunFromText) -> None: + """Check that noqa is properly taken into account on vars and tasks.""" + results = default_text_runner.run_playbook(PLAYBOOK_WITH_NOQA) + # Should raise error at "SOME_VAR". + assert len(results) == 1 + + +def test_playbook_noqa2(default_text_runner: RunFromText) -> None: + """Check that noqa is properly taken into account on vars and tasks.""" + results = default_text_runner.run_playbook(PLAYBOOK_WITH_NOQA, "test") + # Should raise error at "SOME_VAR". + assert len(results) == 1 + + +@pytest.mark.parametrize( + ("lintable", "yaml", "expected_form"), + ( + pytest.param( + Lintable("examples/playbooks/noqa.yml", kind="playbook"), + [ + { + "hosts": "localhost", + "tasks": [ + { + "name": "This would typically fire latest[git] and partial-become", + "become_user": "alice", + "git": "src=/path/to/git/repo dest=checkout", + "__line__": 4, + "__file__": Path("examples/playbooks/noqa.yml"), + } + ], + "__line__": 2, + "__file__": Path("examples/playbooks/noqa.yml"), + } + ], + [ + { + "hosts": "localhost", + "tasks": [ + { + "name": "This would typically fire latest[git] and partial-become", + "become_user": "alice", + "git": "src=/path/to/git/repo dest=checkout", + "__line__": 4, + "__file__": Path("examples/playbooks/noqa.yml"), + SKIPPED_RULES_KEY: ["latest[git]", "partial-become"], + } + ], + "__line__": 2, + "__file__": Path("examples/playbooks/noqa.yml"), + } + ], + ), + pytest.param( + Lintable("examples/playbooks/noqa-nested.yml", kind="playbook"), + [ + { + "hosts": "localhost", + "tasks": [ + { + "name": "Example of multi-level block", + "block": [ + { + "name": "2nd level", + "block": [ + { + "ansible.builtin.debug": { + "msg": "Test unnamed task in block", + "__line__": 9, + "__file__": Path( + "examples/playbooks/noqa-nested.yml" + ), + }, + "__line__": 8, + "__file__": Path( + "examples/playbooks/noqa-nested.yml" + ), + } + ], + "__line__": 6, + "__file__": Path( + "examples/playbooks/noqa-nested.yml" + ), + } + ], + "__line__": 4, + "__file__": Path("examples/playbooks/noqa-nested.yml"), + } + ], + "__line__": 2, + "__file__": Path("examples/playbooks/noqa-nested.yml"), + } + ], + [ + { + "hosts": "localhost", + "tasks": [ + { + "name": "Example of multi-level block", + "block": [ + { + "name": "2nd level", + "block": [ + { + "ansible.builtin.debug": { + "msg": "Test unnamed task in block", + "__line__": 9, + "__file__": Path( + "examples/playbooks/noqa-nested.yml" + ), + }, + "__line__": 8, + "__file__": Path( + "examples/playbooks/noqa-nested.yml" + ), + SKIPPED_RULES_KEY: ["name[missing]"], + } + ], + "__line__": 6, + "__file__": Path( + "examples/playbooks/noqa-nested.yml" + ), + SKIPPED_RULES_KEY: ["name[missing]"], + } + ], + "__line__": 4, + "__file__": Path("examples/playbooks/noqa-nested.yml"), + SKIPPED_RULES_KEY: ["name[missing]"], + } + ], + "__line__": 2, + "__file__": Path("examples/playbooks/noqa-nested.yml"), + } + ], + ), + ), +) +def test_append_skipped_rules( + lintable: Lintable, + yaml: AnsibleBaseYAMLObject, + expected_form: AnsibleBaseYAMLObject, +) -> None: + """Check that it appends skipped_rules properly.""" + assert append_skipped_rules(yaml, lintable) == expected_form + + +@pytest.mark.parametrize( + ("task", "expected"), + ( + pytest.param( + { + "name": "ensure apache is at the latest version", + "yum": {"name": "httpd", "state": "latest"}, + }, + False, + ), + pytest.param( + { + "name": "Attempt and graceful roll back", + "block": [ + {"name": "Force a failure", 
"ansible.builtin.command": "/bin/false"} + ], + "rescue": [ + { + "name": "Force a failure in middle of recovery!", + "ansible.builtin.command": "/bin/false", + } + ], + "always": [ + { + "name": "Always do this", + "ansible.builtin.debug": {"msg": "This always executes"}, + } + ], + }, + True, + ), + ), +) +def test_is_nested_task(task: dict[str, Any], expected: bool) -> None: + """Test is_nested_task() returns expected bool.""" + assert is_nested_task(task) == expected diff --git a/test/test_strict.py b/test/test_strict.py new file mode 100644 index 0000000..9a15619 --- /dev/null +++ b/test/test_strict.py @@ -0,0 +1,26 @@ +"""Test strict mode.""" +import pytest + +from ansiblelint.testing import run_ansible_lint + + +@pytest.mark.parametrize( + ("strict", "returncode", "message"), + ( + pytest.param(True, 2, "Failed", id="on"), + pytest.param(False, 0, "Passed", id="off"), + ), +) +def test_strict(strict: bool, returncode: int, message: str) -> None: + """Test running from inside meta folder.""" + args = ["examples/playbooks/strict-mode.yml"] + if strict: + args.insert(0, "--strict") + result = run_ansible_lint(*args) + assert result.returncode == returncode + assert "args[module]" in result.stdout + for summary_line in result.stderr.splitlines(): + if summary_line.startswith(message): + break + else: + pytest.fail(f"Failed to find {message} inside stderr output") diff --git a/test/test_task_includes.py b/test/test_task_includes.py new file mode 100644 index 0000000..3b02d00 --- /dev/null +++ b/test/test_task_includes.py @@ -0,0 +1,47 @@ +"""Tests related to task inclusions.""" +import pytest + +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + + +@pytest.mark.parametrize( + ("filename", "file_count", "match_count"), + ( + pytest.param("examples/playbooks/blockincludes.yml", 4, 3, id="blockincludes"), + pytest.param( + "examples/playbooks/blockincludes2.yml", + 4, + 3, + id="blockincludes2", + ), + pytest.param("examples/playbooks/taskincludes.yml", 3, 6, id="taskincludes"), + pytest.param("examples/playbooks/taskimports.yml", 5, 3, id="taskimports"), + pytest.param( + "examples/playbooks/include-in-block.yml", + 3, + 1, + id="include-in-block", + ), + pytest.param( + "examples/playbooks/include-import-tasks-in-role.yml", + 4, + 2, + id="role_with_task_inclusions", + ), + ), +) +def test_included_tasks( + default_rules_collection: RulesCollection, + filename: str, + file_count: int, + match_count: int, +) -> None: + """Check if number of loaded files is correct.""" + lintable = Lintable(filename) + default_rules_collection.options.enable_list = ["name[prefix]"] + runner = Runner(lintable, rules=default_rules_collection) + result = runner.run() + assert len(runner.lintables) == file_count + assert len(result) == match_count diff --git a/test/test_text.py b/test/test_text.py new file mode 100644 index 0000000..fa91fee --- /dev/null +++ b/test/test_text.py @@ -0,0 +1,75 @@ +"""Tests for text module.""" +from typing import Any + +import pytest + +from ansiblelint.text import has_glob, has_jinja, strip_ansi_escape, toidentifier + + +@pytest.mark.parametrize( + ("value", "expected"), + ( + pytest.param("\x1b[1;31mHello", "Hello", id="0"), + pytest.param("\x1b[2;37;41mExample_file.zip", "Example_file.zip", id="1"), + pytest.param(b"ansible-lint", "ansible-lint", id="2"), + ), +) +def test_strip_ansi_escape(value: Any, expected: str) -> None: + """Tests for strip_ansi_escape().""" + assert strip_ansi_escape(value) 
== expected
+
+
+@pytest.mark.parametrize(
+ ("value", "expected"),
+ (
+ pytest.param("foo-bar", "foo_bar", id="0"),
+ pytest.param("foo--bar", "foo_bar", id="1"),
+ ),
+)
+def test_toidentifier(value: Any, expected: str) -> None:
+ """Tests for toidentifier()."""
+ assert toidentifier(value) == expected
+
+
+@pytest.mark.parametrize(
+ ("value", "expected"),
+ (pytest.param("example_test.zip", "Unable to convert role name", id="0"),),
+)
+def test_toidentifier_fail(value: Any, expected: str) -> None:
+ """Tests for toidentifier()."""
+ with pytest.raises(RuntimeError) as err:
+ toidentifier(value)
+ assert str(err.value).find(expected) > -1
+
+
+@pytest.mark.parametrize(
+ ("value", "expected"),
+ (
+ pytest.param("", False, id="0"),
+ pytest.param("{{ }}", True, id="1"),
+ pytest.param("foo {# #} bar", True, id="2"),
+ pytest.param("foo \n{% %} bar", True, id="3"),
+ pytest.param(None, False, id="4"),
+ pytest.param(42, False, id="5"),
+ pytest.param(True, False, id="6"),
+ ),
+)
+def test_has_jinja(value: Any, expected: bool) -> None:
+ """Tests for has_jinja()."""
+ assert has_jinja(value) == expected
+
+
+@pytest.mark.parametrize(
+ ("value", "expected"),
+ (
+ pytest.param("", False, id="0"),
+ pytest.param("*", True, id="1"),
+ pytest.param("foo.*", True, id="2"),
+ pytest.param(None, False, id="4"),
+ pytest.param(42, False, id="5"),
+ pytest.param(True, False, id="6"),
+ ),
+)
+def test_has_glob(value: Any, expected: bool) -> None:
+ """Tests for has_glob()."""
+ assert has_glob(value) == expected diff --git a/test/test_transform_mixin.py b/test/test_transform_mixin.py new file mode 100644 index 0000000..5efdf37 --- /dev/null +++ b/test/test_transform_mixin.py @@ -0,0 +1,133 @@
+"""Tests for TransformMixin."""
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from ansiblelint.rules import TransformMixin
+
+if TYPE_CHECKING:
+ from typing import Any, Dict, List, MutableMapping, MutableSequence, Type, Union
+
+
+DUMMY_MAP: dict[str, Any] = {
+ "foo": "text",
+ "bar": {"some": "text2"},
+ "fruits": ["apple", "orange"],
+ "answer": [{"forty-two": ["life", "universe", "everything"]}],
+}
+DUMMY_LIST: list[dict[str, Any]] = [
+ {"foo": "text"},
+ {"bar": {"some": "text2"}, "fruits": ["apple", "orange"]},
+ {"answer": [{"forty-two": ["life", "universe", "everything"]}]},
+]
+
+
+@pytest.mark.parametrize(
+ ("yaml_path", "data", "expected_error"),
+ (
+ ([0], DUMMY_MAP, KeyError),
+ (["bar", 0], DUMMY_MAP, KeyError),
+ (["fruits", 100], DUMMY_MAP, IndexError),
+ (["answer", 1], DUMMY_MAP, IndexError),
+ (["answer", 0, 42], DUMMY_MAP, KeyError),
+ (["answer", 0, "42"], DUMMY_MAP, KeyError),
+ ([100], DUMMY_LIST, IndexError),
+ ([0, 0], DUMMY_LIST, KeyError),
+ ([0, "wrong key"], DUMMY_LIST, KeyError),
+ ([1, "bar", "wrong key"], DUMMY_LIST, KeyError),
+ ([1, "fruits", "index should be int"], DUMMY_LIST, TypeError),
+ ([1, "fruits", 100], DUMMY_LIST, IndexError),
+ ),
+)
+def test_seek_with_bad_path(
+ yaml_path: list[int | str],
+ data: MutableMapping[str, Any] | MutableSequence[Any] | str,
+ expected_error: type[Exception],
+) -> None:
+ """Verify that TransformMixin.seek() propagates errors."""
+ with pytest.raises(expected_error):
+ TransformMixin.seek(yaml_path, data)
+
+
+@pytest.mark.parametrize(
+ ("yaml_path", "data", "expected"),
+ (
+ ([], DUMMY_MAP, DUMMY_MAP),
+ (["foo"], DUMMY_MAP, DUMMY_MAP["foo"]),
+ (["bar"], DUMMY_MAP, DUMMY_MAP["bar"]),
+ (["bar", "some"], DUMMY_MAP, DUMMY_MAP["bar"]["some"]),
+ (["fruits"], DUMMY_MAP,
DUMMY_MAP["fruits"]), + (["fruits", 0], DUMMY_MAP, DUMMY_MAP["fruits"][0]), + (["fruits", 1], DUMMY_MAP, DUMMY_MAP["fruits"][1]), + (["answer"], DUMMY_MAP, DUMMY_MAP["answer"]), + (["answer", 0], DUMMY_MAP, DUMMY_MAP["answer"][0]), + (["answer", 0, "forty-two"], DUMMY_MAP, DUMMY_MAP["answer"][0]["forty-two"]), + ( + ["answer", 0, "forty-two", 0], + DUMMY_MAP, + DUMMY_MAP["answer"][0]["forty-two"][0], + ), + ( + ["answer", 0, "forty-two", 1], + DUMMY_MAP, + DUMMY_MAP["answer"][0]["forty-two"][1], + ), + ( + ["answer", 0, "forty-two", 2], + DUMMY_MAP, + DUMMY_MAP["answer"][0]["forty-two"][2], + ), + ([], DUMMY_LIST, DUMMY_LIST), + ([0], DUMMY_LIST, DUMMY_LIST[0]), + ([0, "foo"], DUMMY_LIST, DUMMY_LIST[0]["foo"]), + ([1], DUMMY_LIST, DUMMY_LIST[1]), + ([1, "bar"], DUMMY_LIST, DUMMY_LIST[1]["bar"]), + ([1, "bar", "some"], DUMMY_LIST, DUMMY_LIST[1]["bar"]["some"]), + ([1, "fruits"], DUMMY_LIST, DUMMY_LIST[1]["fruits"]), + ([1, "fruits", 0], DUMMY_LIST, DUMMY_LIST[1]["fruits"][0]), + ([1, "fruits", 1], DUMMY_LIST, DUMMY_LIST[1]["fruits"][1]), + ([2], DUMMY_LIST, DUMMY_LIST[2]), + ([2, "answer"], DUMMY_LIST, DUMMY_LIST[2]["answer"]), + ([2, "answer", 0], DUMMY_LIST, DUMMY_LIST[2]["answer"][0]), + ( + [2, "answer", 0, "forty-two"], + DUMMY_LIST, + DUMMY_LIST[2]["answer"][0]["forty-two"], + ), + ( + [2, "answer", 0, "forty-two", 0], + DUMMY_LIST, + DUMMY_LIST[2]["answer"][0]["forty-two"][0], + ), + ( + [2, "answer", 0, "forty-two", 1], + DUMMY_LIST, + DUMMY_LIST[2]["answer"][0]["forty-two"][1], + ), + ( + [2, "answer", 0, "forty-two", 2], + DUMMY_LIST, + DUMMY_LIST[2]["answer"][0]["forty-two"][2], + ), + ( + [], + "this is a string that should be returned as is, ignoring path.", + "this is a string that should be returned as is, ignoring path.", + ), + ( + [2, "answer", 0, "forty-two", 2], + "this is a string that should be returned as is, ignoring path.", + "this is a string that should be returned as is, ignoring path.", + ), + ), +) +def test_seek( + yaml_path: list[int | str], + data: MutableMapping[str, Any] | MutableSequence[Any] | str, + expected: Any, +) -> None: + """Ensure TransformMixin.seek() retrieves the correct data.""" + actual = TransformMixin.seek(yaml_path, data) + assert actual == expected diff --git a/test/test_transformer.py b/test/test_transformer.py new file mode 100644 index 0000000..4b593c0 --- /dev/null +++ b/test/test_transformer.py @@ -0,0 +1,156 @@ +"""Tests for Transformer.""" +from __future__ import annotations + +import os +import pathlib +import shutil +from argparse import Namespace +from typing import Iterator + +import pytest + +from ansiblelint.rules import RulesCollection + +# noinspection PyProtectedMember +from ansiblelint.runner import LintResult, _get_matches +from ansiblelint.transformer import Transformer + + +@pytest.fixture(name="copy_examples_dir") +def fixture_copy_examples_dir( + tmp_path: pathlib.Path, config_options: Namespace +) -> Iterator[tuple[pathlib.Path, pathlib.Path]]: + """Fixture that copies the examples/ dir into a tmpdir.""" + examples_dir = pathlib.Path("examples") + + shutil.copytree(examples_dir, tmp_path / "examples") + old_cwd = os.getcwd() + try: + os.chdir(tmp_path) + config_options.cwd = tmp_path + yield pathlib.Path(old_cwd), tmp_path + finally: + os.chdir(old_cwd) + + +@pytest.fixture(name="runner_result") +def fixture_runner_result( + config_options: Namespace, + default_rules_collection: RulesCollection, + playbook: str, +) -> LintResult: + """Fixture that runs the Runner to populate a LintResult for a given file.""" + 
config_options.lintables = [playbook] + result = _get_matches(rules=default_rules_collection, options=config_options) + return result + + +@pytest.mark.parametrize( + ("playbook", "matches_count", "transformed"), + ( + # reuse TestRunner::test_runner test cases to ensure transformer does not mangle matches + pytest.param( + "examples/playbooks/nomatchestest.yml", 0, False, id="nomatchestest" + ), + pytest.param("examples/playbooks/unicode.yml", 1, False, id="unicode"), + pytest.param( + "examples/playbooks/lots_of_warnings.yml", 992, False, id="lots_of_warnings" + ), + pytest.param("examples/playbooks/become.yml", 0, False, id="become"), + pytest.param( + "examples/playbooks/contains_secrets.yml", 0, False, id="contains_secrets" + ), + pytest.param( + "examples/playbooks/vars/empty_vars.yml", 0, False, id="empty_vars" + ), + pytest.param("examples/playbooks/vars/strings.yml", 0, True, id="strings"), + ), +) +def test_transformer( # pylint: disable=too-many-arguments, too-many-locals + config_options: Namespace, + copy_examples_dir: tuple[pathlib.Path, pathlib.Path], + playbook: str, + runner_result: LintResult, + transformed: bool, + matches_count: int, +) -> None: + """ + Test that transformer can go through any corner cases. + + Based on TestRunner::test_runner + """ + setattr(config_options, "write_list", ["all"]) + transformer = Transformer(result=runner_result, options=config_options) + transformer.run() + + matches = runner_result.matches + assert len(matches) == matches_count + + orig_dir, tmp_dir = copy_examples_dir + orig_playbook = orig_dir / playbook + expected_playbook = orig_dir / playbook.replace(".yml", ".transformed.yml") + transformed_playbook = tmp_dir / playbook + + orig_playbook_content = orig_playbook.read_text() + expected_playbook_content = expected_playbook.read_text() + transformed_playbook_content = transformed_playbook.read_text() + + if transformed: + assert orig_playbook_content != transformed_playbook_content + else: + assert orig_playbook_content == transformed_playbook_content + + assert transformed_playbook_content == expected_playbook_content + + +@pytest.mark.parametrize( + ("write_list", "expected"), + ( + # 1 item + (["all"], {"all"}), + (["none"], {"none"}), + (["rule-id"], {"rule-id"}), + # 2 items + (["all", "all"], {"all"}), + (["all", "none"], {"none"}), + (["all", "rule-id"], {"all"}), + (["none", "all"], {"all"}), + (["none", "none"], {"none"}), + (["none", "rule-id"], {"rule-id"}), + (["rule-id", "all"], {"all"}), + (["rule-id", "none"], {"none"}), + (["rule-id", "rule-id"], {"rule-id"}), + # 3 items + (["all", "all", "all"], {"all"}), + (["all", "all", "none"], {"none"}), + (["all", "all", "rule-id"], {"all"}), + (["all", "none", "all"], {"all"}), + (["all", "none", "none"], {"none"}), + (["all", "none", "rule-id"], {"rule-id"}), + (["all", "rule-id", "all"], {"all"}), + (["all", "rule-id", "none"], {"none"}), + (["all", "rule-id", "rule-id"], {"all"}), + (["none", "all", "all"], {"all"}), + (["none", "all", "none"], {"none"}), + (["none", "all", "rule-id"], {"all"}), + (["none", "none", "all"], {"all"}), + (["none", "none", "none"], {"none"}), + (["none", "none", "rule-id"], {"rule-id"}), + (["none", "rule-id", "all"], {"all"}), + (["none", "rule-id", "none"], {"none"}), + (["none", "rule-id", "rule-id"], {"rule-id"}), + (["rule-id", "all", "all"], {"all"}), + (["rule-id", "all", "none"], {"none"}), + (["rule-id", "all", "rule-id"], {"all"}), + (["rule-id", "none", "all"], {"all"}), + (["rule-id", "none", "none"], {"none"}), + (["rule-id", 
"none", "rule-id"], {"rule-id"}), + (["rule-id", "rule-id", "all"], {"all"}), + (["rule-id", "rule-id", "none"], {"none"}), + (["rule-id", "rule-id", "rule-id"], {"rule-id"}), + ), +) +def test_effective_write_set(write_list: list[str], expected: set[str]) -> None: + """Make sure effective_write_set handles all/none keywords correctly.""" + actual = Transformer.effective_write_set(write_list) + assert actual == expected diff --git a/test/test_utils.py b/test/test_utils.py new file mode 100644 index 0000000..ca2461c --- /dev/null +++ b/test/test_utils.py @@ -0,0 +1,405 @@ +# Copyright (c) 2013-2014 Will Thames <will@thames.id.au> +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +"""Tests for generic utility functions.""" +from __future__ import annotations + +import logging +import subprocess +import sys +from argparse import Namespace +from pathlib import Path +from typing import Any, Sequence + +import pytest +from _pytest.capture import CaptureFixture +from _pytest.logging import LogCaptureFixture +from _pytest.monkeypatch import MonkeyPatch +from ansible.utils.sentinel import Sentinel +from ansible_compat.runtime import Runtime + +from ansiblelint import cli, constants, utils +from ansiblelint.__main__ import initialize_logger +from ansiblelint.cli import get_rules_dirs +from ansiblelint.constants import VIOLATIONS_FOUND_RC +from ansiblelint.file_utils import Lintable +from ansiblelint.rules import RulesCollection +from ansiblelint.runner import Runner + +runtime = Runtime(require_module=True) + + +@pytest.mark.parametrize( + ("string", "expected_cmd", "expected_args", "expected_kwargs"), + ( + pytest.param("", "", [], {}, id="blank"), + pytest.param("vars:", "vars", [], {}, id="single_word"), + pytest.param("hello: a=1", "hello", [], {"a": "1"}, id="string_module_and_arg"), + pytest.param("action: hello a=1", "hello", [], {"a": "1"}, id="strips_action"), + pytest.param( + "action: whatever bobbins x=y z=x c=3", + "whatever", + ["bobbins", "x=y", "z=x", "c=3"], + {}, + id="more_than_one_arg", + ), + pytest.param( + "action: command chdir=wxy creates=zyx tar xzf zyx.tgz", + "command", + ["tar", "xzf", "zyx.tgz"], + {"chdir": "wxy", "creates": "zyx"}, + id="command_with_args", + ), + ), +) +def test_tokenize( + string: str, + expected_cmd: str, + expected_args: Sequence[str], + expected_kwargs: dict[str, Any], +) -> None: + """Test that tokenize works for different input types.""" + (cmd, args, kwargs) = utils.tokenize(string) + assert cmd == expected_cmd + assert args == 
expected_args + assert kwargs == expected_kwargs + + +@pytest.mark.parametrize( + ("reference_form", "alternate_forms"), + ( + pytest.param( + {"name": "hello", "action": "command chdir=abc echo hello world"}, + ({"name": "hello", "command": "chdir=abc echo hello world"},), + id="simple_command", + ), + pytest.param( + {"git": {"version": "abc"}, "args": {"repo": "blah", "dest": "xyz"}}, + ( + {"git": {"version": "abc", "repo": "blah", "dest": "xyz"}}, + {"git": "version=abc repo=blah dest=xyz"}, + { + "git": None, + "args": {"repo": "blah", "dest": "xyz", "version": "abc"}, + }, + ), + id="args", + ), + ), +) +def test_normalize( + reference_form: dict[str, Any], alternate_forms: tuple[dict[str, Any]] +) -> None: + """Test that tasks specified differently are normalized same way.""" + normal_form = utils.normalize_task(reference_form, "tasks.yml") + + for form in alternate_forms: + assert normal_form == utils.normalize_task(form, "tasks.yml") + + +def test_normalize_complex_command() -> None: + """Test that tasks specified differently are normalized same way.""" + task1 = { + "name": "hello", + "action": {"module": "pip", "name": "df", "editable": "false"}, + } + task2 = {"name": "hello", "pip": {"name": "df", "editable": "false"}} + task3 = {"name": "hello", "pip": "name=df editable=false"} + task4 = {"name": "hello", "action": "pip name=df editable=false"} + assert utils.normalize_task(task1, "tasks.yml") == utils.normalize_task( + task2, "tasks.yml" + ) + assert utils.normalize_task(task2, "tasks.yml") == utils.normalize_task( + task3, "tasks.yml" + ) + assert utils.normalize_task(task3, "tasks.yml") == utils.normalize_task( + task4, "tasks.yml" + ) + + +@pytest.mark.parametrize( + ("task", "expected_form"), + ( + pytest.param( + { + "name": "ensure apache is at the latest version", + "yum": {"name": "httpd", "state": "latest"}, + }, + { + "delegate_to": Sentinel, + "name": "ensure apache is at the latest version", + "action": { + "__ansible_module__": "yum", + "__ansible_module_original__": "yum", + "__ansible_arguments__": [], + "name": "httpd", + "state": "latest", + }, + }, + ), + pytest.param( + { + "name": "Attempt and graceful roll back", + "block": [ + { + "name": "Install httpd and memcached", + "ansible.builtin.yum": ["httpd", "memcached"], + "state": "present", + } + ], + }, + { + "name": "Attempt and graceful roll back", + "block": [ + { + "name": "Install httpd and memcached", + "ansible.builtin.yum": ["httpd", "memcached"], + "state": "present", + } + ], + "action": { + "__ansible_module__": "block/always/rescue", + "__ansible_module_original__": "block/always/rescue", + }, + }, + ), + ), +) +def test_normalize_task_v2(task: dict[str, Any], expected_form: dict[str, Any]) -> None: + """Check that it normalizes task and returns the expected form.""" + assert utils.normalize_task_v2(task) == expected_form + + +def test_extract_from_list() -> None: + """Check that tasks get extracted from blocks if present.""" + block = { + "block": [{"tasks": {"name": "hello", "command": "whoami"}}], + "test_none": None, + "test_string": "foo", + } + blocks = [block] + + test_list = utils.extract_from_list(blocks, ["block"]) + test_none = utils.extract_from_list(blocks, ["test_none"]) + + assert list(block["block"]) == test_list # type: ignore + assert not test_none + with pytest.raises(RuntimeError): + utils.extract_from_list(blocks, ["test_string"]) + + +def test_extract_from_list_recursive() -> None: + """Check that tasks get extracted from blocks if present.""" + block = { + "block": 
[{"block": [{"name": "hello", "command": "whoami"}]}], + } + blocks = [block] + + test_list = utils.extract_from_list(blocks, ["block"]) + assert list(block["block"]) == test_list + + test_list_recursive = utils.extract_from_list(blocks, ["block"], recursive=True) + assert block["block"] + block["block"][0]["block"] == test_list_recursive + + +@pytest.mark.parametrize( + ("template", "output"), + ( + pytest.param("{{ playbook_dir }}", "/a/b/c", id="simple"), + pytest.param( + "{{ 'hello' | doesnotexist }}", + "hello", # newer implementation ignores unknown filters + id="unknown_filter", + ), + pytest.param( + "{{ hello | to_json }}", + "{{ hello | to_json }}", + id="to_json_filter_on_undefined_variable", + ), + pytest.param( + "{{ hello | to_nice_yaml }}", + "{{ hello | to_nice_yaml }}", + id="to_nice_yaml_filter_on_undefined_variable", + ), + ), +) +def test_template(template: str, output: str) -> None: + """Verify that resolvable template vars and filters get rendered.""" + result = utils.template( + basedir="/base/dir", + value=template, + variables={"playbook_dir": "/a/b/c"}, + fail_on_error=False, + ) + assert result == output + + +def test_task_to_str_unicode() -> None: + """Ensure that extracting messages from tasks preserves Unicode.""" + task = {"fail": {"msg": "unicode é ô à "}} + result = utils.task_to_str(utils.normalize_task(task, "filename.yml")) + assert result == "fail msg=unicode é ô à " + + +def test_logger_debug(caplog: LogCaptureFixture) -> None: + """Test that the double verbosity arg causes logger to be DEBUG.""" + options = cli.get_config(["-vv"]) + initialize_logger(options.verbosity) + + expected_info = ( + "ansiblelint.__main__", + logging.DEBUG, + "Logging initialized to level 10", + ) + + assert expected_info in caplog.record_tuples + + +def test_cli_auto_detect(capfd: CaptureFixture[str]) -> None: + """Test that run without arguments it will detect and lint the entire repository.""" + cmd = [ + sys.executable, + "-m", + "ansiblelint", + "-x", + "schema", # exclude schema as our test file would fail it + "-v", + "-p", + "--nocolor", + ] + result = subprocess.run(cmd, check=False).returncode + + # We de expect to fail on our own repo due to test examples we have + assert result == VIOLATIONS_FOUND_RC + + out, err = capfd.readouterr() + + # Confirmation that it runs in auto-detect mode + assert "Discovered files to lint using: git" in err + assert "Excluded removed files using: git" in err + # An expected rule match from our examples + assert ( + "examples/playbooks/empty_playbook.yml:1:1: " + "syntax-check[empty-playbook]: Empty playbook, nothing to do" in out + ) + # assures that our ansible-lint config exclude was effective in excluding github files + assert "Identified: .github/" not in out + # assures that we can parse playbooks as playbooks + assert "Identified: test/test/always-run-success.yml" not in err + assert ( + "Executing syntax check on playbook examples/playbooks/mocked_dependency.yml" + in err + ) + + +def test_is_playbook() -> None: + """Verify that we can detect a playbook as a playbook.""" + assert utils.is_playbook("examples/playbooks/always-run-success.yml") + + +def test_auto_detect_exclude(monkeypatch: MonkeyPatch) -> None: + """Verify that exclude option can be used to narrow down detection.""" + options = cli.get_config(["--exclude", "foo"]) + + # pylint: disable=unused-argument + def mockreturn(options: Namespace) -> list[str]: + return ["foo/playbook.yml", "bar/playbook.yml"] + + monkeypatch.setattr(utils, "discover_lintables", 
mockreturn) + result = utils.get_lintables(options) + assert result == [Lintable("bar/playbook.yml", kind="playbook")] + + +_DEFAULT_RULEDIRS = [constants.DEFAULT_RULESDIR] +_CUSTOM_RULESDIR = Path(__file__).parent / "custom_rules" +_CUSTOM_RULEDIRS = [ + str(_CUSTOM_RULESDIR / "example_inc"), + str(_CUSTOM_RULESDIR / "example_com"), +] + + +@pytest.mark.parametrize( + ("user_ruledirs", "use_default", "expected"), + ( + ([], True, _DEFAULT_RULEDIRS), + ([], False, _DEFAULT_RULEDIRS), + (_CUSTOM_RULEDIRS, True, _CUSTOM_RULEDIRS + _DEFAULT_RULEDIRS), + (_CUSTOM_RULEDIRS, False, _CUSTOM_RULEDIRS), + ), +) +def test_get_rules_dirs( + user_ruledirs: list[str], use_default: bool, expected: list[str] +) -> None: + """Test it returns expected dir lists.""" + assert get_rules_dirs(user_ruledirs, use_default) == expected + + +@pytest.mark.parametrize( + ("user_ruledirs", "use_default", "expected"), + ( + ([], True, sorted(_CUSTOM_RULEDIRS) + _DEFAULT_RULEDIRS), + ([], False, sorted(_CUSTOM_RULEDIRS) + _DEFAULT_RULEDIRS), + ( + _CUSTOM_RULEDIRS, + True, + _CUSTOM_RULEDIRS + sorted(_CUSTOM_RULEDIRS) + _DEFAULT_RULEDIRS, + ), + (_CUSTOM_RULEDIRS, False, _CUSTOM_RULEDIRS), + ), +) +def test_get_rules_dirs_with_custom_rules( + user_ruledirs: list[str], + use_default: bool, + expected: list[str], + monkeypatch: MonkeyPatch, +) -> None: + """Test it returns expected dir lists when custom rules exist.""" + monkeypatch.setenv(constants.CUSTOM_RULESDIR_ENVVAR, str(_CUSTOM_RULESDIR)) + assert get_rules_dirs(user_ruledirs, use_default) == expected + + +def test_nested_items() -> None: + """Verify correct function of nested_items().""" + data = {"foo": "text", "bar": {"some": "text2"}, "fruits": ["apple", "orange"]} + + items = [ + ("foo", "text", ""), + ("bar", {"some": "text2"}, ""), + ("some", "text2", "bar"), + ("fruits", ["apple", "orange"], ""), + ("list-item", "apple", "fruits"), + ("list-item", "orange", "fruits"), + ] + with pytest.deprecated_call( + match=r"Call to deprecated function ansiblelint\.utils\.nested_items.*" + ): + assert list(utils.nested_items(data)) == items + + +def test_find_children() -> None: + """Verify correct function of find_children().""" + utils.find_children(Lintable("examples/playbooks/find_children.yml")) + + +def test_find_children_in_task(default_rules_collection: RulesCollection) -> None: + """Verify correct function of find_children() in tasks.""" + Runner( + Lintable("examples/playbooks/tasks/bug-2875.yml"), + rules=default_rules_collection, + ).run() diff --git a/test/test_verbosity.py b/test/test_verbosity.py new file mode 100644 index 0000000..18d7fd5 --- /dev/null +++ b/test/test_verbosity.py @@ -0,0 +1,96 @@ +"""Tests related to our logging/verbosity setup.""" +from __future__ import annotations + +import os + +import pytest + +from ansiblelint.testing import run_ansible_lint + + +# substrs is a list of tuples, where: +# component 1 is the substring in question +# component 2 is whether or not to invert ("NOT") the match +@pytest.mark.parametrize( + ("verbosity", "substrs"), + ( + ( + "", + [ + ("WARNING Listing 1 violation(s) that are fatal", False), + ("DEBUG ", True), + ("INFO ", True), + ], + ), + ( + "-q", + [ + ("WARNING ", True), + ("DEBUG ", True), + ("INFO ", True), + ], + ), + ( + "-qq", + [ + ("WARNING ", True), + ("DEBUG ", True), + ("INFO ", True), + ], + ), + ( + "-v", + [ + ("WARNING Listing 1 violation(s) that are fatal", False), + ("INFO Set ANSIBLE_LIBRARY=", False), + ("DEBUG ", True), + ], + ), + ( + "-vv", + [ + # ("DEBUG Loading custom 
.yamllint config file,", False), + ("WARNING Listing 1 violation(s) that are fatal", False), + ("INFO Set ANSIBLE_LIBRARY=", False), + # ("DEBUG Effective yamllint rules used", False), + ], + ), + ( + "-vvvvvvvvvvvvvvvvvvvvvvvvv", + [ + # ("DEBUG Loading custom .yamllint config file,", False), + ("WARNING Listing 1 violation(s) that are fatal", False), + ("INFO Set ANSIBLE_LIBRARY=", False), + # ("DEBUG Effective yamllint rules used", False), + ], + ), + ), + ids=( + "default-verbosity", + "quiet", + "really-quiet", + "loquacious", + "really-loquacious", + 'really-loquacious but with more "v"s -- same as -vv', + ), +) +def test_default_verbosity(verbosity: str, substrs: list[tuple[str, bool]]) -> None: + """Checks that our default verbosity displays (only) warnings.""" + # Piggyback off the .yamllint in the root of the repo, just for testing. + # We'll "override" it with the one in the fixture, to produce a warning. + cwd = os.path.realpath( + os.path.join(os.path.dirname(os.path.realpath(__file__)), "..") + ) + + fakerole = os.path.join("test", "fixtures", "verbosity-tests") + + if verbosity: + result = run_ansible_lint(verbosity, fakerole, cwd=cwd) + else: + result = run_ansible_lint(fakerole, cwd=cwd) + + for substr, invert in substrs: + if invert: + assert substr not in result.stderr, result.stderr + else: + assert substr in result.stderr, result.stderr diff --git a/test/test_with_skip_tagid.py b/test/test_with_skip_tagid.py new file mode 100644 index 0000000..34973f5 --- /dev/null +++ b/test/test_with_skip_tagid.py @@ -0,0 +1,58 @@ +"""Tests related to skip tag id.""" +from ansiblelint.rules import RulesCollection +from ansiblelint.rules.yaml_rule import YamllintRule +from ansiblelint.runner import Runner +from ansiblelint.testing import run_ansible_lint + +FILE = "examples/playbooks/with-skip-tag-id.yml" +collection = RulesCollection() +collection.register(YamllintRule()) + + +def test_negative_no_param() -> None: + """Negative test no param.""" + bad_runner = Runner(FILE, rules=collection) + errs = bad_runner.run() + assert len(errs) > 0 + + +def test_negative_with_id() -> None: + """Negative test with_id.""" + with_id = "yaml" + bad_runner = Runner(FILE, rules=collection, tags=frozenset([with_id])) + errs = bad_runner.run() + assert len(errs) == 1 + + +def test_negative_with_tag() -> None: + """Negative test with_tag.""" + with_tag = "trailing-spaces" + bad_runner = Runner(FILE, rules=collection, tags=frozenset([with_tag])) + errs = bad_runner.run() + assert len(errs) == 1 + + +def test_positive_skip_id() -> None: + """Positive test skip_id.""" + skip_id = "yaml" + good_runner = Runner(FILE, rules=collection, skip_list=[skip_id]) + assert [] == good_runner.run() + + +def test_positive_skip_tag() -> None: + """Positive test skip_tag.""" + skip_tag = "yaml[trailing-spaces]" + good_runner = Runner(FILE, rules=collection, skip_list=[skip_tag]) + assert [] == good_runner.run() + + +def test_run_skip_rule() -> None: + """Test that we can skip a rule with -x.""" + result = run_ansible_lint( + "-x", + "name[casing]", + "examples/playbooks/rule-name-casing.yml", + executable="ansible-lint", + ) + assert result.returncode == 0 + assert result.stdout == "" diff --git a/test/test_yaml_utils.py b/test/test_yaml_utils.py new file mode 100644 index 0000000..8af486b --- /dev/null +++ b/test/test_yaml_utils.py @@ -0,0 +1,905 @@ +"""Tests for yaml-related utility functions.""" +from __future__ import annotations + +from io import StringIO +from pathlib import Path +from typing import Any + +import 
pytest +from ruamel.yaml.comments import CommentedMap, CommentedSeq +from ruamel.yaml.emitter import Emitter +from ruamel.yaml.main import YAML +from yamllint.linter import run as run_yamllint + +import ansiblelint.yaml_utils +from ansiblelint.file_utils import Lintable + +fixtures_dir = Path(__file__).parent / "fixtures" +formatting_before_fixtures_dir = fixtures_dir / "formatting-before" +formatting_prettier_fixtures_dir = fixtures_dir / "formatting-prettier" +formatting_after_fixtures_dir = fixtures_dir / "formatting-after" + + +@pytest.fixture(name="empty_lintable") +def fixture_empty_lintable() -> Lintable: + """Return a Lintable with no contents.""" + lintable = Lintable("__empty_file__.yaml", content="") + return lintable + + +def test_iter_tasks_in_file_with_empty_file(empty_lintable: Lintable) -> None: + """Make sure that iter_tasks_in_file returns early when files are empty.""" + res = list(ansiblelint.yaml_utils.iter_tasks_in_file(empty_lintable)) + assert not res + + +def test_nested_items_path() -> None: + """Verify correct function of nested_items_path().""" + data = { + "foo": "text", + "bar": {"some": "text2"}, + "fruits": ["apple", "orange"], + "answer": [{"forty-two": ["life", "universe", "everything"]}], + } + + items = [ + ("foo", "text", []), + ("bar", {"some": "text2"}, []), + ("some", "text2", ["bar"]), + ("fruits", ["apple", "orange"], []), + (0, "apple", ["fruits"]), + (1, "orange", ["fruits"]), + ("answer", [{"forty-two": ["life", "universe", "everything"]}], []), + (0, {"forty-two": ["life", "universe", "everything"]}, ["answer"]), + ("forty-two", ["life", "universe", "everything"], ["answer", 0]), + (0, "life", ["answer", 0, "forty-two"]), + (1, "universe", ["answer", 0, "forty-two"]), + (2, "everything", ["answer", 0, "forty-two"]), + ] + assert list(ansiblelint.yaml_utils.nested_items_path(data)) == items + + +@pytest.mark.parametrize( + "invalid_data_input", + ( + "string", + 42, + 1.234, + ("tuple",), + {"set"}, + # NoneType is no longer include, as we assume we have to ignore it + ), +) +def test_nested_items_path_raises_typeerror(invalid_data_input: Any) -> None: + """Verify non-dict/non-list types make nested_items_path() raises TypeError.""" + with pytest.raises(TypeError, match=r"Expected a dict or a list.*"): + list(ansiblelint.yaml_utils.nested_items_path(invalid_data_input)) + + +_input_playbook = [ + { + "name": "It's a playbook", # unambiguous; no quotes needed + "tasks": [ + { + "name": '"fun" task', # should be a single-quoted string + "debug": { + # ruamel.yaml default to single-quotes + # our Emitter defaults to double-quotes + "msg": "{{ msg }}", + }, + } + ], + } +] +_SINGLE_QUOTE_WITHOUT_INDENTS = """\ +--- +- name: It's a playbook + tasks: + - name: '"fun" task' + debug: + msg: '{{ msg }}' +""" +_SINGLE_QUOTE_WITH_INDENTS = """\ +--- + - name: It's a playbook + tasks: + - name: '"fun" task' + debug: + msg: '{{ msg }}' +""" +_DOUBLE_QUOTE_WITHOUT_INDENTS = """\ +--- +- name: It's a playbook + tasks: + - name: '"fun" task' + debug: + msg: "{{ msg }}" +""" +_DOUBLE_QUOTE_WITH_INDENTS_EXCEPT_ROOT_LEVEL = """\ +--- +- name: It's a playbook + tasks: + - name: '"fun" task' + debug: + msg: "{{ msg }}" +""" + + +@pytest.mark.parametrize( + ( + "map_indent", + "sequence_indent", + "sequence_dash_offset", + "alternate_emitter", + "expected_output", + ), + ( + pytest.param( + 2, + 2, + 0, + None, + _SINGLE_QUOTE_WITHOUT_INDENTS, + id="single_quote_without_indents", + ), + pytest.param( + 2, + 4, + 2, + None, + _SINGLE_QUOTE_WITH_INDENTS, + 
id="single_quote_with_indents", + ), + pytest.param( + 2, + 2, + 0, + ansiblelint.yaml_utils.FormattedEmitter, + _DOUBLE_QUOTE_WITHOUT_INDENTS, + id="double_quote_without_indents", + ), + pytest.param( + 2, + 4, + 2, + ansiblelint.yaml_utils.FormattedEmitter, + _DOUBLE_QUOTE_WITH_INDENTS_EXCEPT_ROOT_LEVEL, + id="double_quote_with_indents_except_root_level", + ), + ), +) +def test_custom_ruamel_yaml_emitter( + map_indent: int, + sequence_indent: int, + sequence_dash_offset: int, + alternate_emitter: Emitter | None, + expected_output: str, +) -> None: + """Test ``ruamel.yaml.YAML.dump()`` sequence formatting and quotes.""" + yaml = YAML(typ="rt") + # NB: ruamel.yaml does not have typehints, so mypy complains about everything here. + yaml.explicit_start = True # type: ignore[assignment] + yaml.map_indent = map_indent # type: ignore[assignment] + yaml.sequence_indent = sequence_indent # type: ignore[assignment] + yaml.sequence_dash_offset = sequence_dash_offset + if alternate_emitter is not None: + yaml.Emitter = alternate_emitter + # ruamel.yaml only writes to a stream (there is no `dumps` function) + with StringIO() as output_stream: + yaml.dump(_input_playbook, output_stream) + output = output_stream.getvalue() + assert output == expected_output + + +@pytest.fixture(name="yaml_formatting_fixtures") +def fixture_yaml_formatting_fixtures(fixture_filename: str) -> tuple[str, str, str]: + """Get the contents for the formatting fixture files. + + To regenerate these fixtures, please run ``pytest --regenerate-formatting-fixtures``. + + Ideally, prettier should not have to change any ``formatting-after`` fixtures. + """ + before_path = formatting_before_fixtures_dir / fixture_filename + prettier_path = formatting_prettier_fixtures_dir / fixture_filename + after_path = formatting_after_fixtures_dir / fixture_filename + before_content = before_path.read_text() + prettier_content = prettier_path.read_text() + formatted_content = after_path.read_text() + return before_content, prettier_content, formatted_content + + +@pytest.mark.parametrize( + "fixture_filename", + ( + "fmt-1.yml", + "fmt-2.yml", + "fmt-3.yml", + ), +) +def test_formatted_yaml_loader_dumper( + yaml_formatting_fixtures: tuple[str, str, str], + fixture_filename: str, +) -> None: + """Ensure that FormattedYAML loads/dumps formatting fixtures consistently.""" + # pylint: disable=unused-argument + before_content, prettier_content, after_content = yaml_formatting_fixtures + assert before_content != prettier_content + assert before_content != after_content + + yaml = ansiblelint.yaml_utils.FormattedYAML() + + data_before = yaml.loads(before_content) + dump_from_before = yaml.dumps(data_before) + data_prettier = yaml.loads(prettier_content) + dump_from_prettier = yaml.dumps(data_prettier) + data_after = yaml.loads(after_content) + dump_from_after = yaml.dumps(data_after) + + # comparing data does not work because the Comment objects + # have different IDs even if contents do not match. + + assert dump_from_before == after_content + assert dump_from_prettier == after_content + assert dump_from_after == after_content + + # We can't do this because FormattedYAML is stricter in some cases: + # assert prettier_content == after_content + # + # Instead, `pytest --regenerate-formatting-fixtures` will fail if prettier would + # change any files in test/fixtures/formatting-after + + # Running our files through yamllint, after we reformatted them, + # should not yield any problems. 
+ config = ansiblelint.yaml_utils.load_yamllint_config() + assert not list(run_yamllint(after_content, config)) + + +@pytest.fixture(name="lintable") +def fixture_lintable(file_path: str) -> Lintable: + """Return a playbook Lintable for use in ``get_path_to_*`` tests.""" + return Lintable(file_path) + + +@pytest.fixture(name="ruamel_data") +def fixture_ruamel_data(lintable: Lintable) -> CommentedMap | CommentedSeq: + """Return the loaded YAML data for the Lintable.""" + yaml = ansiblelint.yaml_utils.FormattedYAML() + data: CommentedMap | CommentedSeq = yaml.loads(lintable.content) + return data + + +@pytest.mark.parametrize( + ("file_path", "line_number", "expected_path"), + ( + # ignored lintables + pytest.param( + "examples/playbooks/tasks/passing_task.yml", 2, [], id="ignore_tasks_file" + ), + pytest.param( + "examples/roles/more_complex/handlers/main.yml", + 2, + [], + id="ignore_handlers_file", + ), + pytest.param("examples/playbooks/vars/other.yml", 2, [], id="ignore_vars_file"), + pytest.param( + "examples/host_vars/localhost.yml", 2, [], id="ignore_host_vars_file" + ), + pytest.param("examples/group_vars/all.yml", 2, [], id="ignore_group_vars_file"), + pytest.param( + "examples/inventory/inventory.yml", 2, [], id="ignore_inventory_file" + ), + pytest.param( + "examples/roles/dependency_in_meta/meta/main.yml", + 2, + [], + id="ignore_meta_file", + ), + pytest.param( + "examples/reqs_v1/requirements.yml", 2, [], id="ignore_requirements_v1_file" + ), + pytest.param( + "examples/reqs_v2/requirements.yml", 2, [], id="ignore_requirements_v2_file" + ), + # we don't have any release notes examples. Oh well. + # pytest.param("examples/", 2, [], id="ignore_release_notes_file"), + pytest.param( + ".pre-commit-config.yaml", 2, [], id="ignore_unrecognized_yaml_file" + ), + # playbook lintables + pytest.param( + "examples/playbooks/become.yml", + 1, + [], + id="1_play_playbook-line_before_play", + ), + pytest.param( + "examples/playbooks/become.yml", + 2, + [0], + id="1_play_playbook-first_line_in_play", + ), + pytest.param( + "examples/playbooks/become.yml", + 10, + [0], + id="1_play_playbook-middle_line_in_play", + ), + pytest.param( + "examples/playbooks/become.yml", + 100, + [0], + id="1_play_playbook-line_after_eof", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 1, + [], + id="4_play_playbook-line_before_play_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 2, + [0], + id="4_play_playbook-first_line_in_play_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 5, + [0], + id="4_play_playbook-middle_line_in_play_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 9, + [0], + id="4_play_playbook-last_line_in_play_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 10, + [1], + id="4_play_playbook-first_line_in_play_2", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 14, + [1], + id="4_play_playbook-middle_line_in_play_2", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 18, + [1], + id="4_play_playbook-last_line_in_play_2", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 19, + [2], + id="4_play_playbook-first_line_in_play_3", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 23, + [2], + 
id="4_play_playbook-middle_line_in_play_3", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 27, + [2], + id="4_play_playbook-last_line_in_play_3", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 28, + [3], + id="4_play_playbook-first_line_in_play_4", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 31, + [3], + id="4_play_playbook-middle_line_in_play_4", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 35, + [3], + id="4_play_playbook-last_line_in_play_4", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 100, + [3], + id="4_play_playbook-line_after_eof", + ), + pytest.param( + "examples/playbooks/playbook-parent.yml", + 1, + [], + id="import_playbook-line_before_play_1", + ), + pytest.param( + "examples/playbooks/playbook-parent.yml", + 2, + [0], + id="import_playbook-first_line_in_play_1", + ), + pytest.param( + "examples/playbooks/playbook-parent.yml", + 3, + [0], + id="import_playbook-middle_line_in_play_1", + ), + pytest.param( + "examples/playbooks/playbook-parent.yml", + 4, + [0], + id="import_playbook-last_line_in_play_1", + ), + pytest.param( + "examples/playbooks/playbook-parent.yml", + 5, + [1], + id="import_playbook-first_line_in_play_2", + ), + pytest.param( + "examples/playbooks/playbook-parent.yml", + 6, + [1], + id="import_playbook-middle_line_in_play_2", + ), + pytest.param( + "examples/playbooks/playbook-parent.yml", + 7, + [1], + id="import_playbook-last_line_in_play_2", + ), + pytest.param( + "examples/playbooks/playbook-parent.yml", + 8, + [2], + id="import_playbook-first_line_in_play_3", + ), + pytest.param( + "examples/playbooks/playbook-parent.yml", + 9, + [2], + id="import_playbook-last_line_in_play_3", + ), + pytest.param( + "examples/playbooks/playbook-parent.yml", + 15, + [2], + id="import_playbook-line_after_eof", + ), + ), +) +def test_get_path_to_play( + lintable: Lintable, + line_number: int, + ruamel_data: CommentedMap | CommentedSeq, + expected_path: list[int | str], +) -> None: + """Ensure ``get_path_to_play`` returns the expected path given a file + line.""" + path_to_play = ansiblelint.yaml_utils.get_path_to_play( + lintable, line_number, ruamel_data + ) + assert path_to_play == expected_path + + +@pytest.mark.parametrize( + ("file_path", "line_number", "expected_path"), + ( + # ignored lintables + pytest.param("examples/playbooks/vars/other.yml", 2, [], id="ignore_vars_file"), + pytest.param( + "examples/host_vars/localhost.yml", 2, [], id="ignore_host_vars_file" + ), + pytest.param("examples/group_vars/all.yml", 2, [], id="ignore_group_vars_file"), + pytest.param( + "examples/inventory/inventory.yml", 2, [], id="ignore_inventory_file" + ), + pytest.param( + "examples/roles/dependency_in_meta/meta/main.yml", + 2, + [], + id="ignore_meta_file", + ), + pytest.param( + "examples/reqs_v1/requirements.yml", 2, [], id="ignore_requirements_v1_file" + ), + pytest.param( + "examples/reqs_v2/requirements.yml", 2, [], id="ignore_requirements_v2_file" + ), + # we don't have any release notes examples. Oh well. 
+ # pytest.param("examples/", 2, [], id="ignore_release_notes_file"), + pytest.param( + ".pre-commit-config.yaml", 2, [], id="ignore_unrecognized_yaml_file" + ), + # tasks-containing lintables + pytest.param( + "examples/playbooks/become.yml", + 4, + [], + id="1_task_playbook-line_before_tasks", + ), + pytest.param( + "examples/playbooks/become.yml", + 5, + [0, "tasks", 0], + id="1_task_playbook-first_line_in_task_1", + ), + pytest.param( + "examples/playbooks/become.yml", + 10, + [0, "tasks", 0], + id="1_task_playbook-middle_line_in_task_1", + ), + pytest.param( + "examples/playbooks/become.yml", + 15, + [0, "tasks", 0], + id="1_task_playbook-last_line_in_task_1", + ), + pytest.param( + "examples/playbooks/become.yml", + 100, + [0, "tasks", 0], + id="1_task_playbook-line_after_eof_without_anything_after_task", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 1, + [], + id="4_play_playbook-play_1_line_before_tasks", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 7, + [0, "tasks", 0], + id="4_play_playbook-play_1_first_line_task_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 9, + [0, "tasks", 0], + id="4_play_playbook-play_1_last_line_task_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 10, + [], + id="4_play_playbook-play_2_line_before_tasks", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 12, + [], + id="4_play_playbook-play_2_line_before_tasks", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 13, + [1, "tasks", 0], + id="4_play_playbook-play_2_first_line_task_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 18, + [1, "tasks", 0], + id="4_play_playbook-play_2_middle_line_task_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 18, + [1, "tasks", 0], + id="4_play_playbook-play_2_last_line_task_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 19, + [], + id="4_play_playbook-play_3_line_before_tasks", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 22, + [], + id="4_play_playbook-play_3_line_before_tasks", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 23, + [2, "tasks", 0], + id="4_play_playbook-play_3_first_line_task_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 25, + [2, "tasks", 0], + id="4_play_playbook-play_3_middle_line_task_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 27, + [2, "tasks", 0], + id="4_play_playbook-play_3_last_line_task_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 28, + [], + id="4_play_playbook-play_4_line_before_tasks", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 31, + [], + id="4_play_playbook-play_4_line_before_tasks", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 32, + [3, "tasks", 0], + id="4_play_playbook-play_4_first_line_task_1", + ), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 33, + [3, "tasks", 0], + id="4_play_playbook-play_4_middle_line_task_1", + ), + pytest.param( + 
"examples/playbooks/rule-partial-become-without-become-pass.yml", + 35, + [3, "tasks", 0], + id="4_play_playbook-play_4_last_line_task_1", + ), + # playbook with multiple tasks + tasks blocks in a play + pytest.param( + # must have at least one key after one of the tasks blocks + "examples/playbooks/include.yml", + 6, + [0, "pre_tasks", 0], + id="playbook-multi_tasks_blocks-pre_tasks_last_task_before_roles", + ), + pytest.param( + "examples/playbooks/include.yml", + 7, + [], + id="playbook-multi_tasks_blocks-roles_after_pre_tasks", + ), + pytest.param( + "examples/playbooks/include.yml", + 10, + [], + id="playbook-multi_tasks_blocks-roles_before_tasks", + ), + pytest.param( + "examples/playbooks/include.yml", + 12, + [0, "tasks", 0], + id="playbook-multi_tasks_blocks-tasks_first_task", + ), + pytest.param( + "examples/playbooks/include.yml", + 14, + [0, "tasks", 1], + id="playbook-multi_tasks_blocks-tasks_last_task_before_handlers", + ), + pytest.param( + "examples/playbooks/include.yml", + 16, + [0, "handlers", 0], + id="playbook-multi_tasks_blocks-handlers_task", + ), + # playbook with subtasks blocks + pytest.param( + "examples/playbooks/blockincludes.yml", + 14, + [0, "tasks", 0, "block", 1, "block", 0], + id="playbook-deeply_nested_task", + ), + pytest.param( + "examples/playbooks/block.yml", + 12, + [0, "tasks", 0, "block", 1], + id="playbook-subtasks-block_task_2", + ), + pytest.param( + "examples/playbooks/block.yml", + 22, + [0, "tasks", 0, "rescue", 2], + id="playbook-subtasks-rescue_task_3", + ), + pytest.param( + "examples/playbooks/block.yml", + 25, + [0, "tasks", 0, "always", 0], + id="playbook-subtasks-always_task_3", + ), + # tasks files + pytest.param("examples/playbooks/tasks/x.yml", 2, [0], id="tasks-null_task"), + pytest.param( + "examples/playbooks/tasks/x.yml", 6, [1], id="tasks-null_task_next" + ), + pytest.param( + "examples/playbooks/tasks/empty_blocks.yml", + 7, + [0], # this IS part of the first task and "rescue" does not have subtasks. + id="tasks-null_rescue", + ), + pytest.param( + "examples/playbooks/tasks/empty_blocks.yml", + 8, + [0], # this IS part of the first task and "always" does not have subtasks. 
+ id="tasks-empty_always", + ), + pytest.param( + "examples/playbooks/tasks/empty_blocks.yml", + 16, + [1, "always", 0], + id="tasks-task_beyond_empty_blocks", + ), + pytest.param( + "examples/roles/more_complex/tasks/main.yml", + 1, + [], + id="tasks-line_before_tasks", + ), + pytest.param( + "examples/roles/more_complex/tasks/main.yml", + 2, + [0], + id="tasks-first_line_in_task_1", + ), + pytest.param( + "examples/roles/more_complex/tasks/main.yml", + 3, + [0], + id="tasks-middle_line_in_task_1", + ), + pytest.param( + "examples/roles/more_complex/tasks/main.yml", + 4, + [0], + id="tasks-last_line_in_task_1", + ), + pytest.param( + "examples/roles/more_complex/tasks/main.yml", + 5, + [1], + id="tasks-first_line_in_task_2", + ), + pytest.param( + "examples/roles/more_complex/tasks/main.yml", + 6, + [1], + id="tasks-middle_line_in_task_2", + ), + pytest.param( + "examples/roles/more_complex/tasks/main.yml", + 7, + [1], + id="tasks-last_line_in_task_2", + ), + pytest.param( + "examples/roles/more_complex/tasks/main.yml", + 8, + [2], + id="tasks-first_line_in_task_3", + ), + pytest.param( + "examples/roles/more_complex/tasks/main.yml", + 9, + [2], + id="tasks-last_line_in_task_3", + ), + pytest.param( + "examples/roles/more_complex/tasks/main.yml", + 100, + [2], + id="tasks-line_after_eof", + ), + # handlers + pytest.param( + "examples/roles/more_complex/handlers/main.yml", + 1, + [], + id="handlers-line_before_tasks", + ), + pytest.param( + "examples/roles/more_complex/handlers/main.yml", + 2, + [0], + id="handlers-first_line_in_task_1", + ), + pytest.param( + "examples/roles/more_complex/handlers/main.yml", + 3, + [0], + id="handlers-last_line_in_task_1", + ), + pytest.param( + "examples/roles/more_complex/handlers/main.yml", + 100, + [0], + id="handlers-line_after_eof", + ), + ), +) +def test_get_path_to_task( + lintable: Lintable, + line_number: int, + ruamel_data: CommentedMap | CommentedSeq, + expected_path: list[int | str], +) -> None: + """Ensure ``get_task_to_play`` returns the expected path given a file + line.""" + path_to_task = ansiblelint.yaml_utils.get_path_to_task( + lintable, line_number, ruamel_data + ) + assert path_to_task == expected_path + + +@pytest.mark.parametrize( + ("file_path", "line_number"), + ( + pytest.param("examples/playbooks/become.yml", 0, id="1_play_playbook"), + pytest.param( + "examples/playbooks/rule-partial-become-without-become-pass.yml", + 0, + id="4_play_playbook", + ), + pytest.param("examples/playbooks/playbook-parent.yml", 0, id="import_playbook"), + pytest.param("examples/playbooks/become.yml", 0, id="1_task_playbook"), + ), +) +def test_get_path_to_play_raises_value_error_for_bad_line_number( + lintable: Lintable, + line_number: int, + ruamel_data: CommentedMap | CommentedSeq, +) -> None: + """Ensure ``get_path_to_play`` raises ValueError for line_number < 1.""" + with pytest.raises( + ValueError, match=f"expected line_number >= 1, got {line_number}" + ): + ansiblelint.yaml_utils.get_path_to_play(lintable, line_number, ruamel_data) + + +@pytest.mark.parametrize( + ("file_path", "line_number"), + (pytest.param("examples/roles/more_complex/tasks/main.yml", 0, id="tasks"),), +) +def test_get_path_to_task_raises_value_error_for_bad_line_number( + lintable: Lintable, + line_number: int, + ruamel_data: CommentedMap | CommentedSeq, +) -> None: + """Ensure ``get_task_to_play`` raises ValueError for line_number < 1.""" + with pytest.raises( + ValueError, match=f"expected line_number >= 1, got {line_number}" + ): + 
ansiblelint.yaml_utils.get_path_to_task(lintable, line_number, ruamel_data) + + +@pytest.mark.parametrize( + ("before", "after"), + ( + pytest.param(None, None, id="1"), + pytest.param(1, 1, id="2"), + pytest.param({}, {}, id="3"), + pytest.param({"__file__": 1}, {}, id="simple"), + pytest.param({"foo": {"__file__": 1}}, {"foo": {}}, id="nested"), + pytest.param([{"foo": {"__file__": 1}}], [{"foo": {}}], id="nested-in-lint"), + pytest.param({"foo": [{"__file__": 1}]}, {"foo": [{}]}, id="nested-in-lint"), + ), +) +def test_deannotate( + before: Any, + after: Any, +) -> None: + """Ensure deannotate works as intended.""" + assert ansiblelint.yaml_utils.deannotate(before) == after diff --git a/tools/get-version.sh b/tools/get-version.sh new file mode 100755 index 0000000..67033f8 --- /dev/null +++ b/tools/get-version.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -e +{ + python3 -c "import setuptools_scm" || python3 -m pip install --user setuptools-scm +} 1>&2 # redirect stdout to stderr to avoid polluting the output +python3 -m setuptools_scm | \ + sed 's/Guessed Version\([^+]\+\).*/\1/' diff --git a/tools/install-reqs.sh b/tools/install-reqs.sh new file mode 100755 index 0000000..d4f3624 --- /dev/null +++ b/tools/install-reqs.sh @@ -0,0 +1,24 @@ +#!/bin/bash +set -euo pipefail +pushd examples/playbooks/collections >/dev/null +MISSING=() +export ANSIBLE_COLLECTIONS_PATH=. +for COLLECTION in ansible.posix community.general community.molecule; +do + COL_NAME=${COLLECTION//\./-} + FILENAME=$(find . -maxdepth 1 -name "$COL_NAME*" -print -quit) + if test -n "$FILENAME" + then + echo "Already cached $COL_NAME as $FILENAME" + else + MISSING+=("${COLLECTION}") + fi + if [ ${#MISSING[@]} -ne 0 ]; then + ansible-galaxy collection download -p . -v "${MISSING[@]}" + fi +done + +echo "Install requirements.yml ..." +ansible-galaxy collection install *.tar.gz -p . +ansible-galaxy collection list +popd >/dev/null diff --git a/tools/test-setup.sh b/tools/test-setup.sh new file mode 100755 index 0000000..79dbcc8 --- /dev/null +++ b/tools/test-setup.sh @@ -0,0 +1,29 @@ +#!/bin/bash +# This tool is used to setup the environment for running the tests. Its name +# name and location is based on Zuul CI, which can automatically run it. +set -euo pipefail + +# User specific environment +# shellcheck disable=SC2076 +if ! [[ "$PATH" =~ "$HOME/.local/bin" ]] +then + PATH="$HOME/.local/bin:$PATH" +fi + +if [ -f "/usr/bin/apt-get" ]; then + if [ ! 
-f "/var/cache/apt/pkgcache.bin" ]; then + sudo apt-get update # mandatory or other apt-get commands fail + fi + # avoid outdated ansible and pipx + sudo apt-get remove -y ansible pipx || true + # cspell:disable-next-line + sudo apt-get install -y --no-install-recommends -o=Dpkg::Use-Pty=0 \ + curl gcc git python3-venv python3-pip python3-dev libyaml-dev + # Some of these might be needed for compiling packages that do not yet + # a binary for current platform, like pyyaml on py311 + # pip3 install -v --no-binary :all: --user pyyaml +fi + +# Log some useful info in case of unexpected failures: +uname +python3 --version diff --git a/tools/update-version.sh b/tools/update-version.sh new file mode 100755 index 0000000..e2cd24d --- /dev/null +++ b/tools/update-version.sh @@ -0,0 +1,7 @@ +#!/bin/bash +DIR=$(dirname "$0") +VERSION=$(./tools/get-version.sh) +mkdir -p "${DIR}/../dist" +sed -e "s/VERSION_PLACEHOLDER/${VERSION}/" \ + "${DIR}/../.config/ansible-lint.spec" \ + > "${DIR}/../dist/ansible-lint.spec" @@ -0,0 +1,215 @@ +# spell-checker:ignore linkcheck basepython changedir envdir envlist envname envsitepackagesdir passenv setenv testenv toxinidir toxworkdir usedevelop doctrees envpython posargs +[tox] +minversion = 4.0.0 +envlist = + lint + pkg + hook + docs + schemas + py + py-devel + eco +isolated_build = true +skip_missing_interpreters = True + +[testenv] +description = + Run the tests under {basepython} and + devel: ansible devel branch +deps = + --editable .[test] + devel: ansible-core @ git+https://github.com/ansible/ansible.git # GPLv3+ +commands = + # safety measure to assure we do not accidentally run tests with broken dependencies + {envpython} -m pip check + coverage run -m pytest {posargs:\ + -n auto \ + -ra \ + --showlocals \ + --doctest-modules \ + --durations=10 \ + -m "not eco" \ + } + sh -c "coverage combine -a -q --data-file=.coverage .tox/.coverage.*" + +passenv = + CURL_CA_BUNDLE # https proxies, https://github.com/tox-dev/tox/issues/1437 + FORCE_COLOR + HOME + NO_COLOR + PYTEST_* # allows developer to define their own preferences + PYTEST_REQPASS # needed for CI + PYTHON* # PYTHONPYCACHEPREFIX, PYTHONIOENCODING, PYTHONBREAKPOINT,... + PY_COLORS + RTD_TOKEN + REQUESTS_CA_BUNDLE # https proxies + SETUPTOOLS_SCM_DEBUG + SSL_CERT_FILE # https proxies + SSH_AUTH_SOCK # may be needed by git when running with progressive + LANG + LC_* +# recreate = True +setenv = + # Avoid runtime warning that might affect our devel testing + devel: ANSIBLE_DEVEL_WARNING = false + COVERAGE_FILE = {env:COVERAGE_FILE:{toxworkdir}/.coverage.{envname}} + COVERAGE_PROCESS_START={toxinidir}/pyproject.toml + PIP_CONSTRAINT = {toxinidir}/.config/requirements.txt + devel,py38,pkg: PIP_CONSTRAINT = /dev/null + PIP_DISABLE_PIP_VERSION_CHECK = 1 + PRE_COMMIT_COLOR = always + FORCE_COLOR = 1 +allowlist_externals = + find + git + pwd + rm + sh + tox +# both options needed to workaround https://github.com/tox-dev/tox/issues/2197 +usedevelop = false +skip_install = true + +[testenv:lint] +description = Run all linters +# pip compile includes python version in output constraints, so we want to +# be sure that version does not change randomly. 
+basepython = python3.9 +deps = + pre-commit>=2.6.0 + setuptools>=51.1.1 + pytest>=7.2.0 # to updated schemas +skip_install = true +commands = + {envpython} -m pre_commit run --all-files --show-diff-on-failure {posargs:} +passenv = + {[testenv]passenv} + PRE_COMMIT_HOME +setenv = + {[testenv]setenv} + # avoid messing pre-commit with out own constraints + PIP_CONSTRAINT= + +[testenv:hook] +description = Validate pre-commit hook definition +deps = pre-commit +commands = + sh -c "mkdir -p .tox/x && cd .tox/x && git init && python3 -m pre_commit try-repo -v {toxinidir} ansible-lint" +setenv = + PIP_CONSTRAINT=/dev/null + +[testenv:deps] +description = Bump all test dependencies +# we reuse the lint environment +envdir = {toxworkdir}/lint +skip_install = true +basepython = python3.9 +deps = + {[testenv:lint]deps} +setenv = + # without his upgrade would likely not do anything + PIP_CONSTRAINT = /dev/null +commands = + pre-commit run --all-files --show-diff-on-failure --hook-stage manual lock + pre-commit run --all-files --show-diff-on-failure --hook-stage manual up + # Update pre-commit hooks + pre-commit autoupdate + # Update npm deps + sh -c "cd test/schemas && npm run deps" + # We fail if files are modified at the end + git diff --exit-code + +[testenv:docs] +description = Builds docs +extras = + docs +setenv = + # Disable colors until markdown-exec supports it: + # https://github.com/pawamoy/markdown-exec/issues/11 + NO_COLOR = 1 +skip_install = false +usedevelop = true +commands = + mkdocs build {posargs:} + +[testenv:redirects] +description = Update documentation redirections for readthedocs +deps = + readthedocs-cli +commands = + # This assumes you loaded RTD_TOKEN in your environment + rtd projects ansible-lint redirects sync -f docs/redirects.yml --wet-run + +[testenv:schemas] +description = Rebuild and test JSON Schemas +deps = + check-jsonschema +skip_install = true +changedir = test/schemas +commands_pre = + npm install +commands = + npm test +allowlist_externals = + npm + +[testenv:eco] +description = Perform ecosystem impact (downstream testing) https://github.com/ansible/ansible-lint/discussions/1403 +skip_install = true +deps = + {[testenv]deps} +commands = + python3 -m venv .tox/venv + .tox/venv/bin/pip install -q git+https://github.com/ansible/ansible-lint@main + pytest -n auto --durations=3 -m eco +allowlist_externals = + .tox/venv/bin/pip +setenv = + PYTEST_REQPASS=7 + +[testenv:pkg] +description = + Build package, verify metadata, install package and assert behavior when ansible is missing. +deps = + build >= 0.9.0 + twine >= 4.0.1 +skip_install = true +# Ref: https://twitter.com/di_codes/status/1044358639081975813 +commands = + # build wheel and sdist using PEP-517 + {envpython} -c 'import os.path, shutil, sys; \ + dist_dir = os.path.join("{toxinidir}", "dist"); \ + os.path.isdir(dist_dir) or sys.exit(0); \ + print("Removing \{!s\} contents...".format(dist_dir), file=sys.stderr); \ + shutil.rmtree(dist_dir)' + {envpython} -m build --outdir {toxinidir}/dist/ {toxinidir} + # Validate metadata using twine + twine check --strict {toxinidir}/dist/* + # Install the wheel + sh -c 'python3 -m pip install "ansible-lint[lock] @ file://$(echo {toxinidir}/dist/*.whl)"' + # Uninstall it + python3 -m pip uninstall -y ansible-lint + +[testenv:clean] +description = Remove temporary files +skip_install = true +deps = +commands = + find . 
-type f -name '*.py[co]' -delete -o -type d -name __pycache__ -name coverage.xml -name .coverage + rm -rf .mypy_cache + +[testenv:coverage] +description = Combines and displays coverage results +skip_install = true +usedevelop = false +setenv = + COVERAGE_PROCESS_START={toxinidir}/pyproject.toml +commands = + python3 -m coverage --version + # needed by codecov github actions, also ignored result to reach report one. + python3 -m coverage xml --fail-under=0 + # just for humans running it: + python3 -m coverage report +deps = + coverage[toml]>=7.0.5 |
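+# Example invocation (an assumed typical workflow, not mandated by this file):
+# "tox -e py" runs the suite under coverage and combines the per-process data;
+# "tox -e coverage" then produces the XML and terminal reports from that data.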