summaryrefslogtreecommitdiffstats
path: root/third_party/python/jsonschema/json/bin/jsonschema_suite
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-28 14:29:10 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-28 14:29:10 +0000
commit2aa4a82499d4becd2284cdb482213d541b8804dd (patch)
treeb80bf8bf13c3766139fbacc530efd0dd9d54394c /third_party/python/jsonschema/json/bin/jsonschema_suite
parentInitial commit. (diff)
downloadfirefox-upstream.tar.xz
firefox-upstream.zip
Adding upstream version 86.0.1.upstream/86.0.1upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/python/jsonschema/json/bin/jsonschema_suite')
-rwxr-xr-xthird_party/python/jsonschema/json/bin/jsonschema_suite298
1 file changed, 298 insertions, 0 deletions
diff --git a/third_party/python/jsonschema/json/bin/jsonschema_suite b/third_party/python/jsonschema/json/bin/jsonschema_suite
new file mode 100755
index 0000000000..6b1c486450
--- /dev/null
+++ b/third_party/python/jsonschema/json/bin/jsonschema_suite
@@ -0,0 +1,298 @@
+#! /usr/bin/env python3
+from __future__ import print_function
+from pprint import pformat
+import argparse
+import errno
+import fnmatch
+import json
+import os
+import random
+import shutil
+import sys
+import textwrap
+import unittest
+import warnings
+
# Compatibility shim: very old unittest modules (pre-Python 2.7) ship no
# ``skipIf``.  Install a stand-in decorator factory that never skips, so the
# ``@unittest.skipIf(...)`` guards below work everywhere.
if getattr(unittest, "skipIf", None) is None:
    def _skip_if(cond, msg):
        def _decorator(fn):
            return fn
        return _decorator
    unittest.skipIf = _skip_if
+
# ``jsonschema`` is an optional dependency: when it is missing, the sanity
# tests that need schema validation are skipped via ``unittest.skipIf``.
try:
    import jsonschema
except ImportError:
    jsonschema = None
else:
    # Older jsonschema releases exposed the validator registry directly as
    # ``jsonschema.validators``; newer ones nest a ``validators`` mapping
    # inside that module.  Normalize to the mapping either way.
    validators = getattr(
        jsonschema.validators, "validators", jsonschema.validators
    )
+
+
# Repository root: the parent of the bin/ directory this script lives in.
# NOTE(review): ``rstrip("__pycache__")`` strips a trailing *character set*
# (any run of the letters in "_pycahe"), not the literal suffix — presumably
# intended to handle execution out of a __pycache__ directory; fragile if the
# parent path ever ends in one of those characters.  Confirm intent upstream.
ROOT_DIR = os.path.abspath(
    os.path.join(os.path.dirname(__file__), os.pardir).rstrip("__pycache__"),
)
# Directory holding the per-draft test-suite JSON files.
SUITE_ROOT_DIR = os.path.join(ROOT_DIR, "tests")

# Canonical contents of the remote-ref schemas.  These must stay in sync with
# the files under remotes/ (enforced by test_remote_schemas_are_updated, and
# regenerable via the ``dump_remotes`` subcommand).
REMOTES = {
    "integer.json": {u"type": u"integer"},
    "name.json": {
        u"type": "string",
        u"definitions": {
            u"orNull": {u"anyOf": [{u"type": u"null"}, {u"$ref": u"#"}]},
        },
    },
    "name-defs.json": {
        u"type": "string",
        u"$defs": {
            u"orNull": {u"anyOf": [{u"type": u"null"}, {u"$ref": u"#"}]},
        },
    },
    "subSchemas.json": {
        u"integer": {u"type": u"integer"},
        u"refToInteger": {u"$ref": u"#/integer"},
    },
    "folder/folderInteger.json": {u"type": u"integer"}
}
REMOTES_DIR = os.path.join(ROOT_DIR, "remotes")

# Meta-schema used to validate the structure of every test-suite file.
with open(os.path.join(ROOT_DIR, "test-schema.json")) as schema:
    TESTSUITE_SCHEMA = json.load(schema)
+
def files(paths):
    """Yield the parsed JSON contents of each file named in *paths*."""
    for file_path in paths:
        with open(file_path) as handle:
            contents = json.load(handle)
        yield contents
+
+
def groups(paths):
    """Flatten the files named by *paths* into their test groups."""
    return (
        group
        for loaded in files(paths)
        for group in loaded
    )
+
+
def cases(paths):
    """Yield every individual test case found under *paths*.

    Each yielded case dict is annotated (mutated in place) with a
    ``"schema"`` key copied from its enclosing group.
    """
    for group in groups(paths):
        group_schema = group["schema"]
        for case in group["tests"]:
            case["schema"] = group_schema
            yield case
+
+
def collect(root_dir):
    """Yield the path of every ``*.json`` file beneath *root_dir*."""
    for dirpath, _, filenames in os.walk(root_dir):
        for name in fnmatch.filter(filenames, "*.json"):
            yield os.path.join(dirpath, name)
+
+
class SanityTests(unittest.TestCase):
    """Self-checks that keep the suite's JSON data files consistent."""

    @classmethod
    def setUpClass(cls):
        # Gather every test file once; all of the checks below reuse it.
        print("Looking for tests in %s" % SUITE_ROOT_DIR)
        cls.test_files = list(collect(SUITE_ROOT_DIR))
        print("Found %s test files" % len(cls.test_files))
        assert cls.test_files, "Didn't find the test files!"

    def test_all_files_are_valid_json(self):
        """Every suite file must parse as JSON."""
        for path in self.test_files:
            with open(path) as test_file:
                try:
                    json.load(test_file)
                except ValueError as error:
                    self.fail("%s contains invalid JSON (%s)" % (path, error))

    def test_all_descriptions_have_reasonable_length(self):
        """Case descriptions must stay short enough for runner output."""
        for case in cases(self.test_files):
            description = case["description"]
            self.assertLess(
                len(description),
                70,
                "%r is too long! (keep it to less than 70 chars)" % (
                    description,
                ),
            )

    def test_all_descriptions_are_unique(self):
        """Within one group, every test description must be distinct."""
        for group in groups(self.test_files):
            descriptions = set(test["description"] for test in group["tests"])
            self.assertEqual(
                len(descriptions),
                len(group["tests"]),
                "%r contains a duplicate description" % (group,)
            )

    @unittest.skipIf(jsonschema is None, "Validation library not present!")
    def test_all_schemas_are_valid(self):
        """Each case's schema must be valid per its draft's meta-schema."""
        # Top-level directories under tests/ are draft names that double as
        # keys into the jsonschema validator registry.
        for schema in os.listdir(SUITE_ROOT_DIR):
            schema_validator = validators.get(schema)
            if schema_validator is not None:
                test_files = collect(os.path.join(SUITE_ROOT_DIR, schema))
                for case in cases(test_files):
                    try:
                        schema_validator.check_schema(case["schema"])
                    except jsonschema.SchemaError as error:
                        self.fail("%s contains an invalid schema (%s)" %
                                  (case, error))
            else:
                # Unknown draft directory — warn rather than fail, since the
                # installed jsonschema may simply predate the draft.
                warnings.warn("No schema validator for %s" % schema)

    @unittest.skipIf(jsonschema is None, "Validation library not present!")
    def test_suites_are_valid(self):
        """Every suite file must conform to the suite meta-schema."""
        validator = jsonschema.Draft4Validator(TESTSUITE_SCHEMA)
        for tests in files(self.test_files):
            try:
                validator.validate(tests)
            except jsonschema.ValidationError as error:
                self.fail(str(error))

    def test_remote_schemas_are_updated(self):
        """remotes/ on disk must mirror the REMOTES dict in this script."""
        files = {}
        for parent, _, paths in os.walk(REMOTES_DIR):
            for path in paths:
                absolute_path = os.path.join(parent, path)
                with open(absolute_path) as schema_file:
                    files[absolute_path] = json.load(schema_file)

        expected = {
            os.path.join(REMOTES_DIR, path): contents
            for path, contents in REMOTES.items()
        }

        # Paths present on only one side, and paths whose contents differ —
        # used only to build a helpful failure message below.
        missing = set(files).symmetric_difference(expected)
        changed = {
            path
            for path, contents in expected.items()
            if path in files
            and contents != files[path]
        }

        self.assertEqual(
            files,
            expected,
            msg=textwrap.dedent(
                """
                Remotes in the remotes/ directory do not match those in the
                ``jsonschema_suite`` Python script.

                Unfortunately for the minute, each remote file is duplicated in
                two places.""" + ("""

                Only present in one location:

                {}""".format("\n".join(missing)) if missing else "") + ("""

                Conflicting between the two:

                {}""".format("\n".join(changed)) if changed else "")
            )
        )
+
+
def main(arguments):
    """Run the subcommand selected in the parsed *arguments*.

    ``check``        -- run the sanity-test suite; exit nonzero on failure.
    ``flatten``      -- dump all cases for one draft version as a JSON array.
    ``remotes``      -- dump the remote-ref schemas as one JSON object.
    ``dump_remotes`` -- write the remote-ref schemas out as a file tree.
    ``serve``        -- serve the remote-ref schemas over HTTP (needs Flask).
    """
    if arguments.command == "check":
        suite = unittest.TestLoader().loadTestsFromTestCase(SanityTests)
        result = unittest.TextTestRunner(verbosity=2).run(suite)
        # Process exit status: 0 on success, 1 if any sanity test failed.
        sys.exit(not result.wasSuccessful())
    elif arguments.command == "flatten":
        # ``cases`` is a generator; materialize it so it can be shuffled and
        # serialized.  (list(...) replaces a redundant [x for x in ...].)
        selected_cases = list(cases(collect(arguments.version)))

        if arguments.randomize:
            random.shuffle(selected_cases)

        json.dump(selected_cases, sys.stdout, indent=4, sort_keys=True)
    elif arguments.command == "remotes":
        json.dump(REMOTES, sys.stdout, indent=4, sort_keys=True)
    elif arguments.command == "dump_remotes":
        if arguments.update:
            shutil.rmtree(arguments.out_dir, ignore_errors=True)

        try:
            os.makedirs(arguments.out_dir)
        except OSError as e:
            if e.errno == errno.EEXIST:
                # Refuse to clobber an existing tree unless --update was given.
                print("%s already exists. Aborting." % arguments.out_dir)
                sys.exit(1)
            raise

        for url, schema in REMOTES.items():
            filepath = os.path.join(arguments.out_dir, url)

            # Remote names may contain subdirectories (e.g. folder/...).
            try:
                os.makedirs(os.path.dirname(filepath))
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

            with open(filepath, "w") as out_file:
                json.dump(schema, out_file, indent=4, sort_keys=True)
                out_file.write("\n")
    elif arguments.command == "serve":
        try:
            from flask import Flask, jsonify
        except ImportError:
            print(textwrap.dedent("""
                The Flask library is required to serve the remote schemas.

                You can install it by running `pip install Flask`.

                Alternatively, see the `jsonschema_suite remotes` or
                `jsonschema_suite dump_remotes` commands to create static files
                that can be served with your own web server.
            """.strip("\n")))
            sys.exit(1)

        app = Flask(__name__)

        @app.route("/<path:path>")
        def serve_path(path):
            # Serve only the known remote documents; anything else is a 404.
            if path in REMOTES:
                return jsonify(REMOTES[path])
            return "Document does not exist.", 404

        app.run(port=1234)
+
+
# Command-line interface: one subparser per utility command; the chosen
# command name lands in ``arguments.command`` for dispatch in ``main``.
parser = argparse.ArgumentParser(
    description="JSON Schema Test Suite utilities",
)
subparsers = parser.add_subparsers(help="utility commands", dest="command")

# check: run the SanityTests suite over the data files.
check = subparsers.add_parser("check", help="Sanity check the test suite.")

# flatten: print all cases of one draft directory as a single JSON array.
flatten = subparsers.add_parser(
    "flatten",
    help="Output a flattened file containing a selected version's test cases."
)
flatten.add_argument(
    "--randomize",
    action="store_true",
    help="Randomize the order of the outputted cases.",
)
flatten.add_argument(
    "version", help="The directory containing the version to output",
)

# remotes: print the REMOTES mapping as JSON.
remotes = subparsers.add_parser(
    "remotes",
    help="Output the expected URLs and their associated schemas for remote "
         "ref tests as a JSON object."
)

# dump_remotes: write the REMOTES mapping out as files under --out-dir.
dump_remotes = subparsers.add_parser(
    "dump_remotes", help="Dump the remote ref schemas into a file tree",
)
dump_remotes.add_argument(
    "--update",
    action="store_true",
    help="Update the remotes in an existing directory.",
)
dump_remotes.add_argument(
    "--out-dir",
    default=REMOTES_DIR,
    type=os.path.abspath,
    help="The output directory to create as the root of the file tree",
)

# serve: host the remote schemas over HTTP for the remote-ref tests.
serve = subparsers.add_parser(
    "serve",
    help="Start a webserver to serve schemas used by remote ref tests."
)

if __name__ == "__main__":
    main(parser.parse_args())