author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-13 12:06:49 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-13 12:06:49 +0000
commit    2fe34b6444502079dc0b84365ce82dbc92de308e (patch)
tree      8fedcab52bbbc3db6c5aa909a88a7a7b81685018 /test/schemas/src
parent    Initial commit. (diff)
Adding upstream version 6.17.2. (tag: upstream/6.17.2)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'test/schemas/src')
-rw-r--r--  test/schemas/src/rebuild.py      140
-rw-r--r--  test/schemas/src/schema.spec.ts  184
2 files changed, 324 insertions, 0 deletions
diff --git a/test/schemas/src/rebuild.py b/test/schemas/src/rebuild.py
new file mode 100644
index 0000000..2fab8c0
--- /dev/null
+++ b/test/schemas/src/rebuild.py
@@ -0,0 +1,140 @@
+"""Utility to generate some complex patterns."""
+import copy
+import json
+import keyword
+import sys
+from pathlib import Path
+from typing import Any
+
+play_keywords = list(
+    filter(
+        None,
+        """\
+any_errors_fatal
+become
+become_exe
+become_flags
+become_method
+become_user
+check_mode
+collections
+connection
+debugger
+diff
+environment
+fact_path
+force_handlers
+gather_facts
+gather_subset
+gather_timeout
+handlers
+hosts
+ignore_errors
+ignore_unreachable
+max_fail_percentage
+module_defaults
+name
+no_log
+order
+port
+post_tasks
+pre_tasks
+remote_user
+roles
+run_once
+serial
+strategy
+tags
+tasks
+throttle
+timeout
+vars
+vars_files
+vars_prompt
+""".split(),
+    ),
+)
+
+
+def is_ref_used(obj: Any, ref: str) -> bool:
+ """Return a reference use from a schema."""
+ ref_use = f"#/$defs/{ref}"
+ if isinstance(obj, dict):
+ if obj.get("$ref", None) == ref_use:
+ return True
+ for _ in obj.values():
+ if isinstance(_, (dict, list)) and is_ref_used(_, ref):
+ return True
+ elif isinstance(obj, list):
+ for _ in obj:
+ if isinstance(_, (dict, list)) and is_ref_used(_, ref):
+ return True
+ return False
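+# Example (illustrative): for schema = {"$defs": {"play": {}}, "$ref": "#/$defs/play"},
+# is_ref_used(schema, "play") is True and is_ref_used(schema, "role") is False;
+# the pruning loop in __main__ relies on this to drop unused definitions.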
+
+
+if __name__ == "__main__":
+    invalid_var_names = sorted(list(keyword.kwlist) + play_keywords)
+    if "__peg_parser__" in invalid_var_names:
+        invalid_var_names.remove("__peg_parser__")
+    print("Updating invalid var names")  # noqa: T201
+
+    with Path("f/vars.json").open("r+", encoding="utf-8") as f:
+        vars_schema = json.load(f)
+        vars_schema["anyOf"][0]["patternProperties"] = {
+            f"^(?!({'|'.join(invalid_var_names)})$)[a-zA-Z_][\\w]*$": {},
+        }
+        f.seek(0)
+        json.dump(vars_schema, f, indent=2)
+        f.write("\n")
+        f.truncate()
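+        # Illustrative: the pattern written above expands to something like
+        # ^(?!(False|None|...|vars_prompt)$)[a-zA-Z_][\w]*$ (middle elided),
+        # accepting "my_var" but rejecting keywords such as "class" or "tasks".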
+
+ print("Compiling subschemas...") # noqa: T201
+ with Path("f/ansible.json").open(encoding="utf-8") as f:
+ combined_json = json.load(f)
+
+    for subschema in ["tasks", "playbook"]:
+        sub_json = copy.deepcopy(combined_json)
+        # remove unsafe keys from root
+        for key in [
+            "$id",
+            "id",
+            "title",
+            "description",
+            "type",
+            "default",
+            "items",
+            "properties",
+            "additionalProperties",
+            "examples",
+        ]:
+            if key in sub_json:
+                del sub_json[key]
+        for key in sub_json:
+            if key not in ["$schema", "$defs"]:
+                print(  # noqa: T201
+                    f"Unexpected key found at combined schema root: {key}",
+                )
+                sys.exit(2)
+        # Copy keys from the selected subschema to the root.
+        for key, value in combined_json["$defs"][subschema].items():
+            sub_json[key] = value
+ sub_json["$comment"] = "Generated from ansible.json, do not edit."
+ sub_json[
+ "$id"
+ ] = f"https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/{subschema}.json"
+
+        # Remove all unreferenced ($ref) definitions ($defs) recursively.
+        # This repeats until a fixed point because deleting one unused
+        # definition can orphan another that only it referenced.
+        while True:
+            spare = []
+            for k in sub_json["$defs"]:
+                if not is_ref_used(sub_json, k):
+                    spare.append(k)
+            for k in spare:
+                print(f"{subschema}: deleting unused '{k}' definition")  # noqa: T201
+                del sub_json["$defs"][k]
+            if not spare:
+                break
+
+ with Path(f"f/{subschema}.json").open("w", encoding="utf-8") as f:
+ json.dump(sub_json, f, indent=2, sort_keys=True)
+ f.write("\n")
diff --git a/test/schemas/src/schema.spec.ts b/test/schemas/src/schema.spec.ts
new file mode 100644
index 0000000..b826461
--- /dev/null
+++ b/test/schemas/src/schema.spec.ts
@@ -0,0 +1,184 @@
+import * as path from "path";
+import Ajv from "ajv";
+import fs from "fs";
+import { minimatch } from "minimatch";
+import yaml from "js-yaml";
+import { assert } from "chai";
+import stringify from "safe-stable-stringify";
+import { spawnSync } from "child_process";
+
+function ansiRegex({ onlyFirst = false } = {}) {
+  const pattern = [
+    "[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)",
+    "(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))",
+  ].join("|");
+
+  return new RegExp(pattern, onlyFirst ? undefined : "g");
+}
+
+function stripAnsi(data: string) {
+  if (typeof data !== "string") {
+    throw new TypeError(
+      `Expected a \`string\`, got \`${typeof data}\` = ${data}`
+    );
+  }
+  return data.replace(ansiRegex(), "");
+}
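+// Illustrative example: stripAnsi("\u001B[31merror\u001B[0m") returns "error",
+// which keeps the markdown error dumps written below free of terminal color codes.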
+
+const ajv = new Ajv({
+  strictTypes: false,
+  strict: false,
+  inlineRefs: true, // https://github.com/ajv-validator/ajv/issues/1581#issuecomment-832211568
+  allErrors: true, // https://github.com/ajv-validator/ajv/issues/1581#issuecomment-832211568
+});
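+// Note: schemas registered through ajv.addSchema() below can reference each
+// other by $id, and ajv.getSchema() can resolve subschema URIs such as
+// "<schema $id>#/$defs/tasks" (used for the subschema tests further down).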
+
+// load the list of all test files that schema examples are allowed to match
+const test_files = getAllFiles("./test");
+const negative_test_files = getAllFiles("./negative_test");
+
+// load all schemas
+const schema_files = fs
+  .readdirSync("f/")
+  .filter((el) => path.extname(el) === ".json");
+console.log(`Schemas: ${schema_files}`);
+
+describe("schemas under f/", function () {
+ schema_files.forEach((schema_file) => {
+ if (
+ schema_file.startsWith("_") ||
+ ["ansible-navigator-config.json", "rulebook.json"].includes(schema_file)
+ ) {
+ return;
+ }
+ const schema_json = JSON.parse(fs.readFileSync(`f/${schema_file}`, "utf8"));
+ ajv.addSchema(schema_json);
+ const validator = ajv.compile(schema_json);
+ if (schema_json.examples == undefined) {
+ console.error(
+ `Schema file ${schema_file} is missing an examples key that we need for documenting file matching patterns.`
+ );
+ return process.exit(1);
+ }
+    describe(schema_file, function () {
+      getTestFiles(schema_json.examples).forEach(
+        ({ file: test_file, expect_fail }) => {
+          it(`linting ${test_file} using ${schema_file}`, function () {
+            let errors_md = "";
+            const result = validator(
+              yaml.load(fs.readFileSync(test_file, "utf8"))
+            );
+            if (validator.errors) {
+              errors_md += "# ajv errors\n\n```json\n";
+              errors_md += stringify(validator.errors, null, 2);
+              errors_md += "\n```\n\n";
+            }
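+            // Illustrative: the command spawned below expands to something like
+            //   $VIRTUAL_ENV/bin/check-jsonschema -v -o json --schemafile f/playbook.json test/playbook.yml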
+            // Cross-validate using check-jsonschema (python-jsonschema).
+            // Do not use python -m ... calling notation because, for some
+            // reason, the nodejs environment lacks some needed env variables
+            // and that breaks usage from inside virtualenvs.
+            const proc = spawnSync(
+              `${process.env.VIRTUAL_ENV}/bin/check-jsonschema -v -o json --schemafile f/${schema_file} ${test_file}`,
+              { shell: true, encoding: "utf-8", stdio: "pipe" }
+            );
+            if (proc.status !== 0) {
+              // real errors are sent to stderr due to https://github.com/python-jsonschema/check-jsonschema/issues/88
+              errors_md += "# check-jsonschema\n\nstdout:\n\n```json\n";
+              errors_md += stripAnsi(proc.output[1]);
+              errors_md += "```\n";
+              if (proc.output[2]) {
+                errors_md += "\nstderr:\n\n```\n";
+                errors_md += stripAnsi(proc.output[2]);
+                errors_md += "```\n";
+              }
+            }
+
+            // dump errors to a markdown file for manual inspection
+            const md_filename = `${test_file}.md`;
+            if (errors_md) {
+              fs.writeFileSync(md_filename, errors_md);
+            } else {
+              // if no error occurred, ensure no stale md file is left behind
+              fs.unlink(md_filename, function (err) {
+                if (err && err.code !== "ENOENT") {
+                  console.error(`Failed to remove ${md_filename}.`);
+                }
+              });
+            }
+            assert.equal(
+              result,
+              !expect_fail,
+              `${JSON.stringify(validator.errors)}`
+            );
+          });
+        }
+      );
+      // All /$defs/ entries that have an examples property are assumed to be
+      // subschemas, "tasks" being the primary such case, which is also used
+      // for validating separated files.
+      for (const definition in schema_json["$defs"]) {
+        if (schema_json["$defs"][definition].examples) {
+          const subschema_uri = `${schema_json["$id"]}#/$defs/${definition}`;
+          const subschema_validator = ajv.getSchema(subschema_uri);
+          if (!subschema_validator) {
+            console.error(`Failed to load subschema ${subschema_uri}`);
+            return process.exit(1);
+          }
+          getTestFiles(schema_json["$defs"][definition].examples).forEach(
+            ({ file: test_file, expect_fail }) => {
+              it(`linting ${test_file} using ${subschema_uri}`, function () {
+                const result = subschema_validator(
+                  yaml.load(fs.readFileSync(test_file, "utf8"))
+                );
+                assert.equal(
+                  result,
+                  !expect_fail,
+                  `${JSON.stringify(subschema_validator.errors)}`
+                );
+              });
+            }
+          );
+        }
+      }
+    });
+  });
+});
+
+// find all tests for each schema file
+function getTestFiles(
+  globs: string[]
+): { file: string; expect_fail: boolean }[] {
+  const files = Array.from(
+    new Set(
+      globs
+        .map((glob: string) => minimatch.match(test_files, path.join("**", glob)))
+        .flat()
+    )
+  );
+  const negative_files = Array.from(
+    new Set(
+      globs
+        .map((glob: string) =>
+          minimatch.match(negative_test_files, path.join("**", glob))
+        )
+        .flat()
+    )
+  );
+
+  // All files under negative_test, such as `foo.fail.yml`, are expected to
+  // fail validation.
+  let result = files.map((f) => ({ file: f, expect_fail: false }));
+  result = result.concat(
+    negative_files.map((f) => ({ file: f, expect_fail: true }))
+  );
+  return result;
+}
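+// Illustrative: given examples ["playbook.yml"] and files test/playbook.yml
+// plus negative_test/playbook.yml, getTestFiles would return roughly
+// [{ file: "test/playbook.yml", expect_fail: false },
+//  { file: "negative_test/playbook.yml", expect_fail: true }].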
+
+function getAllFiles(dir: string): string[] {
+  return fs.readdirSync(dir).reduce((files: string[], file: string) => {
+    const name = path.join(dir, file);
+    const isDirectory = fs.statSync(name).isDirectory();
+    return isDirectory ? [...files, ...getAllFiles(name)] : [...files, name];
+  }, []);
+}