Diffstat (limited to 'testing/web-platform/tests/common/security-features/tools')
-rw-r--r--  testing/web-platform/tests/common/security-features/tools/format_spec_src_json.py             |  24
-rwxr-xr-x  testing/web-platform/tests/common/security-features/tools/generate.py                         | 464
-rw-r--r--  testing/web-platform/tests/common/security-features/tools/spec.src.json                       | 533
-rwxr-xr-x  testing/web-platform/tests/common/security-features/tools/spec_validator.py                   | 253
-rw-r--r--  testing/web-platform/tests/common/security-features/tools/template/disclaimer.template        |   1
-rw-r--r--  testing/web-platform/tests/common/security-features/tools/template/spec_json.js.template      |   1
-rw-r--r--  testing/web-platform/tests/common/security-features/tools/template/test.debug.html.template   |  26
-rw-r--r--  testing/web-platform/tests/common/security-features/tools/template/test.release.html.template |  22
-rw-r--r--  testing/web-platform/tests/common/security-features/tools/util.py                             | 230
9 files changed, 1554 insertions, 0 deletions
diff --git a/testing/web-platform/tests/common/security-features/tools/format_spec_src_json.py b/testing/web-platform/tests/common/security-features/tools/format_spec_src_json.py
new file mode 100644
index 0000000000..d1bf5817ad
--- /dev/null
+++ b/testing/web-platform/tests/common/security-features/tools/format_spec_src_json.py
@@ -0,0 +1,24 @@
+import collections
+import json
+import os
+
+
+def main():
+ '''Formats spec.src.json.'''
+ script_directory = os.path.dirname(os.path.abspath(__file__))
+ for dir in [
+ 'mixed-content', 'referrer-policy', 'referrer-policy/4K-1',
+ 'referrer-policy/4K', 'referrer-policy/4K+1',
+ 'upgrade-insecure-requests'
+ ]:
+ filename = os.path.join(script_directory, '..', '..', '..', dir,
+ 'spec.src.json')
+ spec = json.load(
+ open(filename, 'r'), object_pairs_hook=collections.OrderedDict)
+ with open(filename, 'w') as f:
+ f.write(json.dumps(spec, indent=2, separators=(',', ': ')))
+ f.write('\n')
+
+
+if __name__ == '__main__':
+ main()
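
For reference, the formatting step above is just a JSON round trip: load with an OrderedDict hook so key order survives, then rewrite with two-space indentation and a trailing newline. A minimal standalone sketch of that round trip (the path below is hypothetical):

    import collections
    import json

    def reformat_json_in_place(path):
        # Load preserving key order, then rewrite with 2-space indentation
        # and a trailing newline, mirroring format_spec_src_json.py above.
        with open(path, 'r') as f:
            data = json.load(f, object_pairs_hook=collections.OrderedDict)
        with open(path, 'w') as f:
            f.write(json.dumps(data, indent=2, separators=(',', ': ')))
            f.write('\n')

    # Hypothetical usage:
    # reformat_json_in_place('referrer-policy/spec.src.json')
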
diff --git a/testing/web-platform/tests/common/security-features/tools/generate.py b/testing/web-platform/tests/common/security-features/tools/generate.py
new file mode 100755
index 0000000000..176e0ebbeb
--- /dev/null
+++ b/testing/web-platform/tests/common/security-features/tools/generate.py
@@ -0,0 +1,464 @@
+#!/usr/bin/env python3
+
+from __future__ import print_function
+
+import argparse
+import collections
+import copy
+import json
+import os
+import sys
+
+import spec_validator
+import util
+
+
+def expand_pattern(expansion_pattern, test_expansion_schema):
+ expansion = {}
+ for artifact_key in expansion_pattern:
+ artifact_value = expansion_pattern[artifact_key]
+ if artifact_value == '*':
+ expansion[artifact_key] = test_expansion_schema[artifact_key]
+ elif isinstance(artifact_value, list):
+ expansion[artifact_key] = artifact_value
+ elif isinstance(artifact_value, dict):
+ # Flattened expansion.
+ expansion[artifact_key] = []
+ values_dict = expand_pattern(artifact_value,
+ test_expansion_schema[artifact_key])
+ for sub_key in values_dict.keys():
+ expansion[artifact_key] += values_dict[sub_key]
+ else:
+ expansion[artifact_key] = [artifact_value]
+
+ return expansion
+
+
+def permute_expansion(expansion,
+ artifact_order,
+ selection={},
+ artifact_index=0):
+ assert isinstance(artifact_order, list), "artifact_order should be a list"
+
+ if artifact_index >= len(artifact_order):
+ yield selection
+ return
+
+ artifact_key = artifact_order[artifact_index]
+
+ for artifact_value in expansion[artifact_key]:
+ selection[artifact_key] = artifact_value
+ for next_selection in permute_expansion(expansion, artifact_order,
+ selection, artifact_index + 1):
+ yield next_selection
+
+
+# Dumps the test config `selection` into a serialized JSON string.
+def dump_test_parameters(selection):
+ return json.dumps(
+ selection,
+ indent=2,
+ separators=(',', ': '),
+ sort_keys=True,
+ cls=util.CustomEncoder)
+
+
+def get_test_filename(spec_directory, spec_json, selection):
+ '''Returns the filename for the main test HTML file.'''
+
+ selection_for_filename = copy.deepcopy(selection)
+ # Use 'unset' rather than 'None' in test filenames.
+ if selection_for_filename['delivery_value'] is None:
+ selection_for_filename['delivery_value'] = 'unset'
+
+ return os.path.join(
+ spec_directory,
+ spec_json['test_file_path_pattern'] % selection_for_filename)
+
+
+def get_csp_value(value):
+ '''
+ Returns the actual CSP header value (e.g. "worker-src 'self'") for the
+ given string used as a PolicyDelivery value (e.g. "worker-src-self").
+ '''
+
+ # script-src
+ # Test-related scripts like testharness.js and inline scripts containing
+ # test bodies.
+ # 'unsafe-inline' is added as a workaround here. This is probably not so
+ # bad, as it shouldn't interfere with the non-inline-script requests that
+ # we want to test.
+ if value == 'script-src-wildcard':
+ return "script-src * 'unsafe-inline'"
+ if value == 'script-src-self':
+ return "script-src 'self' 'unsafe-inline'"
+ # A workaround for "script-src 'none'" would be more complicated, because
+ # - "script-src 'none' 'unsafe-inline'" is handled somewhat differently from
+ # "script-src 'none'", i.e.
+ # https://w3c.github.io/webappsec-csp/#match-url-to-source-list Step 3
+ # handles the latter but not the former.
+ # - We need nonce- or path-based additional values to allow same-origin
+ # test scripts like testharness.js.
+ # Therefore, we disable 'script-src-none' tests for now in
+ # `/content-security-policy/spec.src.json`.
+ if value == 'script-src-none':
+ return "script-src 'none'"
+
+ # worker-src
+ if value == 'worker-src-wildcard':
+ return 'worker-src *'
+ if value == 'worker-src-self':
+ return "worker-src 'self'"
+ if value == 'worker-src-none':
+ return "worker-src 'none'"
+ raise Exception('Invalid delivery_value: %s' % value)
+
+def handle_deliveries(policy_deliveries):
+ '''
+ Generate <meta> elements and HTTP headers for the given list of
+ PolicyDelivery.
+ TODO(hiroshige): Merge duplicated code here, scope/document.py, etc.
+ '''
+
+ meta = ''
+ headers = {}
+
+ for delivery in policy_deliveries:
+ if delivery.value is None:
+ continue
+ if delivery.key == 'referrerPolicy':
+ if delivery.delivery_type == 'meta':
+ meta += \
+ '<meta name="referrer" content="%s">' % delivery.value
+ elif delivery.delivery_type == 'http-rp':
+ headers['Referrer-Policy'] = delivery.value
+ # TODO(kristijanburnik): Limit to WPT origins.
+ headers['Access-Control-Allow-Origin'] = '*'
+ else:
+ raise Exception(
+ 'Invalid delivery_type: %s' % delivery.delivery_type)
+ elif delivery.key == 'mixedContent':
+ assert (delivery.value == 'opt-in')
+ if delivery.delivery_type == 'meta':
+ meta += '<meta http-equiv="Content-Security-Policy" ' + \
+ 'content="block-all-mixed-content">'
+ elif delivery.delivery_type == 'http-rp':
+ headers['Content-Security-Policy'] = 'block-all-mixed-content'
+ else:
+ raise Exception(
+ 'Invalid delivery_type: %s' % delivery.delivery_type)
+ elif delivery.key == 'contentSecurityPolicy':
+ csp_value = get_csp_value(delivery.value)
+ if delivery.delivery_type == 'meta':
+ meta += '<meta http-equiv="Content-Security-Policy" ' + \
+ 'content="' + csp_value + '">'
+ elif delivery.delivery_type == 'http-rp':
+ headers['Content-Security-Policy'] = csp_value
+ else:
+ raise Exception(
+ 'Invalid delivery_type: %s' % delivery.delivery_type)
+ elif delivery.key == 'upgradeInsecureRequests':
+ # https://w3c.github.io/webappsec-upgrade-insecure-requests/#delivery
+ assert (delivery.value == 'upgrade')
+ if delivery.delivery_type == 'meta':
+ meta += '<meta http-equiv="Content-Security-Policy" ' + \
+ 'content="upgrade-insecure-requests">'
+ elif delivery.delivery_type == 'http-rp':
+ headers[
+ 'Content-Security-Policy'] = 'upgrade-insecure-requests'
+ else:
+ raise Exception(
+ 'Invalid delivery_type: %s' % delivery.delivery_type)
+ else:
+ raise Exception('Invalid delivery_key: %s' % delivery.key)
+ return {"meta": meta, "headers": headers}
+
+
+def generate_selection(spec_json, selection):
+ '''
+ Returns a scenario object (with a top-level source_context_list entry,
+ which will be removed in generate_test_file() later).
+ '''
+
+ target_policy_delivery = util.PolicyDelivery(selection['delivery_type'],
+ selection['delivery_key'],
+ selection['delivery_value'])
+ del selection['delivery_type']
+ del selection['delivery_key']
+ del selection['delivery_value']
+
+ # Parse source context list and policy deliveries of source contexts.
+ # `util.ShouldSkip()` exceptions are raised if e.g. unsupported
+ # combinations of source contexts and policy deliveries are used.
+ source_context_list_scheme = spec_json['source_context_list_schema'][
+ selection['source_context_list']]
+ selection['source_context_list'] = [
+ util.SourceContext.from_json(source_context, target_policy_delivery,
+ spec_json['source_context_schema'])
+ for source_context in source_context_list_scheme['sourceContextList']
+ ]
+
+ # Check if the subresource is supported by the innermost source context.
+ innermost_source_context = selection['source_context_list'][-1]
+ supported_subresource = spec_json['source_context_schema'][
+ 'supported_subresource'][innermost_source_context.source_context_type]
+ if supported_subresource != '*':
+ if selection['subresource'] not in supported_subresource:
+ raise util.ShouldSkip()
+
+ # Parse subresource policy deliveries.
+ selection[
+ 'subresource_policy_deliveries'] = util.PolicyDelivery.list_from_json(
+ source_context_list_scheme['subresourcePolicyDeliveries'],
+ target_policy_delivery, spec_json['subresource_schema']
+ ['supported_delivery_type'][selection['subresource']])
+
+ # Generate per-scenario test description.
+ selection['test_description'] = spec_json[
+ 'test_description_template'] % selection
+
+ return selection
+
+
+def generate_test_file(spec_directory, test_helper_filenames,
+ test_html_template_basename, test_filename, scenarios):
+ '''
+ Generates a test HTML file (and possibly its associated .headers file)
+ from `scenarios`.
+ '''
+
+ # Scenarios for the same file should have the same `source_context_list`,
+ # including the top-level one.
+ # Note: currently, non-top-level source contexts aren't necessarily required
+ # to be the same, but we set this requirement as it will be useful, e.g.
+ # when we reuse a worker among multiple scenarios.
+ for scenario in scenarios:
+ assert (scenario['source_context_list'] == scenarios[0]
+ ['source_context_list'])
+
+ # We process the top source context below, and do not include it in
+ # the JSON objects (i.e. `scenarios`) in generated HTML files.
+ top_source_context = scenarios[0]['source_context_list'].pop(0)
+ assert (top_source_context.source_context_type == 'top')
+ for scenario in scenarios[1:]:
+ assert (scenario['source_context_list'].pop(0) == top_source_context)
+
+ parameters = {}
+
+ # Sort scenarios, to avoid unnecessary diffs due to different orders in
+ # `scenarios`.
+ serialized_scenarios = sorted(
+ [dump_test_parameters(scenario) for scenario in scenarios])
+
+ parameters['scenarios'] = ",\n".join(serialized_scenarios).replace(
+ "\n", "\n" + " " * 10)
+
+ test_directory = os.path.dirname(test_filename)
+
+ parameters['helper_js'] = ""
+ for test_helper_filename in test_helper_filenames:
+ parameters['helper_js'] += ' <script src="%s"></script>\n' % (
+ os.path.relpath(test_helper_filename, test_directory))
+ parameters['sanity_checker_js'] = os.path.relpath(
+ os.path.join(spec_directory, 'generic', 'sanity-checker.js'),
+ test_directory)
+ parameters['spec_json_js'] = os.path.relpath(
+ os.path.join(spec_directory, 'generic', 'spec_json.js'),
+ test_directory)
+
+ test_headers_filename = test_filename + ".headers"
+
+ test_html_template = util.get_template(test_html_template_basename)
+ disclaimer_template = util.get_template('disclaimer.template')
+
+ html_template_filename = os.path.join(util.template_directory,
+ test_html_template_basename)
+ generated_disclaimer = disclaimer_template \
+ % {'generating_script_filename': os.path.relpath(sys.argv[0],
+ util.test_root_directory),
+ 'spec_directory': os.path.relpath(spec_directory,
+ util.test_root_directory)}
+
+ # Adjust the template for the test invoking JS. Indent it to look nice.
+ parameters['generated_disclaimer'] = generated_disclaimer.rstrip()
+
+ # Directory for the test files.
+ try:
+ os.makedirs(test_directory)
+ except OSError:
+ pass
+
+ delivery = handle_deliveries(top_source_context.policy_deliveries)
+
+ if len(delivery['headers']) > 0:
+ with open(test_headers_filename, "w") as f:
+ for header in delivery['headers']:
+ f.write('%s: %s\n' % (header, delivery['headers'][header]))
+
+ parameters['meta_delivery_method'] = delivery['meta']
+ # Keep the output lint-clean and the formatting pretty.
+ if len(parameters['meta_delivery_method']) > 0:
+ parameters['meta_delivery_method'] = "\n " + \
+ parameters['meta_delivery_method']
+
+ # Write out the generated HTML file.
+ util.write_file(test_filename, test_html_template % parameters)
+
+
+def generate_test_source_files(spec_directory, test_helper_filenames,
+ spec_json, target):
+ test_expansion_schema = spec_json['test_expansion_schema']
+ specification = spec_json['specification']
+
+ if target == "debug":
+ spec_json_js_template = util.get_template('spec_json.js.template')
+ util.write_file(
+ os.path.join(spec_directory, "generic", "spec_json.js"),
+ spec_json_js_template % {'spec_json': json.dumps(spec_json)})
+ util.write_file(
+ os.path.join(spec_directory, "generic",
+ "debug-output.spec.src.json"),
+ json.dumps(spec_json, indent=2, separators=(',', ': ')))
+
+ # Choose a debug/release template depending on the target.
+ html_template = "test.%s.html.template" % target
+
+ artifact_order = list(test_expansion_schema.keys())
+ artifact_order.remove('expansion')
+
+ excluded_selection_pattern = ''
+ for key in artifact_order:
+ excluded_selection_pattern += '%(' + key + ')s/'
+
+ # Build the set of excluded test selections.
+ exclusion_dict = set()
+ for excluded_pattern in spec_json['excluded_tests']:
+ excluded_expansion = \
+ expand_pattern(excluded_pattern, test_expansion_schema)
+ for excluded_selection in permute_expansion(excluded_expansion,
+ artifact_order):
+ excluded_selection['delivery_key'] = spec_json['delivery_key']
+ exclusion_dict.add(excluded_selection_pattern % excluded_selection)
+
+ # `scenarios[filename]` represents the list of scenario objects to be
+ # generated into `filename`.
+ scenarios = {}
+
+ for spec in specification:
+ # Used to make entries with expansion="override" override preceding
+ # entries with the same |selection_path|.
+ output_dict = {}
+
+ for expansion_pattern in spec['test_expansion']:
+ expansion = expand_pattern(expansion_pattern,
+ test_expansion_schema)
+ for selection in permute_expansion(expansion, artifact_order):
+ selection['delivery_key'] = spec_json['delivery_key']
+ selection_path = spec_json['selection_pattern'] % selection
+ if selection_path in output_dict:
+ if expansion_pattern['expansion'] != 'override':
+ print("Error: expansion is default in:")
+ print(dump_test_parameters(selection))
+ print("but overrides:")
+ print(dump_test_parameters(
+ output_dict[selection_path]))
+ sys.exit(1)
+ output_dict[selection_path] = copy.deepcopy(selection)
+
+ for selection_path in output_dict:
+ selection = output_dict[selection_path]
+ if (excluded_selection_pattern % selection) in exclusion_dict:
+ print('Excluding selection:', selection_path)
+ continue
+ try:
+ test_filename = get_test_filename(spec_directory, spec_json,
+ selection)
+ scenario = generate_selection(spec_json, selection)
+ scenarios[test_filename] = scenarios.get(test_filename,
+ []) + [scenario]
+ except util.ShouldSkip:
+ continue
+
+ for filename in scenarios:
+ generate_test_file(spec_directory, test_helper_filenames,
+ html_template, filename, scenarios[filename])
+
+
+def merge_json(base, child):
+ for key in child:
+ if key not in base:
+ base[key] = child[key]
+ continue
+ # `base[key]` and `child[key]` both exist.
+ if isinstance(base[key], list) and isinstance(child[key], list):
+ base[key].extend(child[key])
+ elif isinstance(base[key], dict) and isinstance(child[key], dict):
+ merge_json(base[key], child[key])
+ else:
+ base[key] = child[key]
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description='Test suite generator utility')
+ parser.add_argument(
+ '-t',
+ '--target',
+ type=str,
+ choices=("release", "debug"),
+ default="release",
+ help='Sets the appropriate template for generating tests')
+ parser.add_argument(
+ '-s',
+ '--spec',
+ type=str,
+ default=os.getcwd(),
+ help='Specify the directory containing spec.src.json, used for describing and generating the tests')
+ # TODO(kristijanburnik): Add option for the spec_json file.
+ args = parser.parse_args()
+
+ spec_directory = os.path.abspath(args.spec)
+
+ # Read `spec.src.json` files, starting from `spec_directory`, and
+ # continuing to parent directories as long as `spec.src.json` exists.
+ spec_filenames = []
+ test_helper_filenames = []
+ spec_src_directory = spec_directory
+ while len(spec_src_directory) >= len(util.test_root_directory):
+ spec_filename = os.path.join(spec_src_directory, "spec.src.json")
+ if not os.path.exists(spec_filename):
+ break
+ spec_filenames.append(spec_filename)
+ test_filename = os.path.join(spec_src_directory, 'generic',
+ 'test-case.sub.js')
+ assert (os.path.exists(test_filename))
+ test_helper_filenames.append(test_filename)
+ spec_src_directory = os.path.abspath(
+ os.path.join(spec_src_directory, ".."))
+
+ spec_filenames = list(reversed(spec_filenames))
+ test_helper_filenames = list(reversed(test_helper_filenames))
+
+ if len(spec_filenames) == 0:
+ print('Error: No spec.src.json found at %s.' % spec_directory)
+ return
+
+ # Load the default spec JSON file, ...
+ default_spec_filename = os.path.join(util.script_directory,
+ 'spec.src.json')
+ spec_json = collections.OrderedDict()
+ if os.path.exists(default_spec_filename):
+ spec_json = util.load_spec_json(default_spec_filename)
+
+ # ... and then make spec JSON files in subdirectories override the default.
+ for spec_filename in spec_filenames:
+ child_spec_json = util.load_spec_json(spec_filename)
+ merge_json(spec_json, child_spec_json)
+
+ spec_validator.assert_valid_spec_json(spec_json)
+ generate_test_source_files(spec_directory, test_helper_filenames,
+ spec_json, args.target)
+
+
+if __name__ == '__main__':
+ main()
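
The core of generate.py above is expand_pattern() plus permute_expansion(): each test_expansion entry is expanded into per-artifact value lists ('*' meaning every value allowed by test_expansion_schema), and the cross product of those lists yields the concrete selections. A minimal standalone sketch of that idea, using a made-up two-artifact schema rather than a real spec.src.json:

    import itertools

    schema = {'source_scheme': ['http', 'https'],
              'redirection': ['no-redirect', 'swap-origin']}
    pattern = {'source_scheme': '*', 'redirection': ['no-redirect']}

    # '*' expands to every schema value; explicit lists are kept as-is.
    expansion = {key: schema[key] if value == '*' else value
                 for key, value in pattern.items()}

    # Cross product over all artifacts, analogous to permute_expansion().
    for combo in itertools.product(*(expansion[key] for key in schema)):
        print(dict(zip(schema, combo)))
    # {'source_scheme': 'http', 'redirection': 'no-redirect'}
    # {'source_scheme': 'https', 'redirection': 'no-redirect'}
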
diff --git a/testing/web-platform/tests/common/security-features/tools/spec.src.json b/testing/web-platform/tests/common/security-features/tools/spec.src.json
new file mode 100644
index 0000000000..4a84493f47
--- /dev/null
+++ b/testing/web-platform/tests/common/security-features/tools/spec.src.json
@@ -0,0 +1,533 @@
+{
+ "selection_pattern": "%(source_context_list)s.%(delivery_type)s/%(delivery_value)s/%(subresource)s/%(origin)s.%(redirection)s.%(source_scheme)s",
+ "test_file_path_pattern": "gen/%(source_context_list)s.%(delivery_type)s/%(delivery_value)s/%(subresource)s.%(source_scheme)s.html",
+ "excluded_tests": [
+ {
+ // Workers are same-origin only
+ "expansion": "*",
+ "source_scheme": "*",
+ "source_context_list": "*",
+ "delivery_type": "*",
+ "delivery_value": "*",
+ "redirection": "*",
+ "subresource": [
+ "worker-classic",
+ "worker-module",
+ "sharedworker-classic",
+ "sharedworker-module"
+ ],
+ "origin": [
+ "cross-https",
+ "cross-http",
+ "cross-http-downgrade",
+ "cross-wss",
+ "cross-ws",
+ "cross-ws-downgrade"
+ ],
+ "expectation": "*"
+ },
+ {
+ // Workers are same-origin only (redirects)
+ "expansion": "*",
+ "source_scheme": "*",
+ "source_context_list": "*",
+ "delivery_type": "*",
+ "delivery_value": "*",
+ "redirection": [
+ "swap-origin",
+ "swap-scheme"
+ ],
+ "subresource": [
+ "worker-classic",
+ "worker-module",
+ "sharedworker-classic",
+ "sharedworker-module"
+ ],
+ "origin": "*",
+ "expectation": "*"
+ },
+ {
+ // Websockets are ws/wss-only
+ "expansion": "*",
+ "source_scheme": "*",
+ "source_context_list": "*",
+ "delivery_type": "*",
+ "delivery_value": "*",
+ "redirection": "*",
+ "subresource": "websocket",
+ "origin": [
+ "same-https",
+ "same-http",
+ "same-http-downgrade",
+ "cross-https",
+ "cross-http",
+ "cross-http-downgrade"
+ ],
+ "expectation": "*"
+ },
+ {
+ // Redirects are intentionally forbidden in browsers:
+ // https://fetch.spec.whatwg.org/#concept-websocket-establish
+ // Websockets are no-redirect only
+ "expansion": "*",
+ "source_scheme": "*",
+ "source_context_list": "*",
+ "delivery_type": "*",
+ "delivery_value": "*",
+ "redirection": [
+ "keep-origin",
+ "swap-origin",
+ "keep-scheme",
+ "swap-scheme",
+ "downgrade"
+ ],
+ "subresource": "websocket",
+ "origin": "*",
+ "expectation": "*"
+ },
+ {
+ // ws/wss are websocket-only
+ "expansion": "*",
+ "source_scheme": "*",
+ "source_context_list": "*",
+ "delivery_type": "*",
+ "delivery_value": "*",
+ "redirection": "*",
+ "subresource": [
+ "a-tag",
+ "area-tag",
+ "audio-tag",
+ "beacon",
+ "fetch",
+ "iframe-tag",
+ "img-tag",
+ "link-css-tag",
+ "link-prefetch-tag",
+ "object-tag",
+ "picture-tag",
+ "script-tag",
+ "script-tag-dynamic-import",
+ "sharedworker-classic",
+ "sharedworker-import",
+ "sharedworker-import-data",
+ "sharedworker-module",
+ "video-tag",
+ "worker-classic",
+ "worker-import",
+ "worker-import-data",
+ "worker-module",
+ "worklet-animation",
+ "worklet-animation-import-data",
+ "worklet-audio",
+ "worklet-audio-import-data",
+ "worklet-layout",
+ "worklet-layout-import-data",
+ "worklet-paint",
+ "worklet-paint-import-data",
+ "xhr"
+ ],
+ "origin": [
+ "same-wss",
+ "same-ws",
+ "same-ws-downgrade",
+ "cross-wss",
+ "cross-ws",
+ "cross-ws-downgrade"
+ ],
+ "expectation": "*"
+ },
+ {
+ // Worklets are HTTPS contexts only
+ "expansion": "*",
+ "source_scheme": "http",
+ "source_context_list": "*",
+ "delivery_type": "*",
+ "delivery_value": "*",
+ "redirection": "*",
+ "subresource": [
+ "worklet-animation",
+ "worklet-animation-import-data",
+ "worklet-audio",
+ "worklet-audio-import-data",
+ "worklet-layout",
+ "worklet-layout-import-data",
+ "worklet-paint",
+ "worklet-paint-import-data"
+ ],
+ "origin": "*",
+ "expectation": "*"
+ }
+ ],
+ "source_context_schema": {
+ "supported_subresource": {
+ "top": "*",
+ "iframe": "*",
+ "iframe-blank": "*",
+ "srcdoc": "*",
+ "worker-classic": [
+ "xhr",
+ "fetch",
+ "websocket",
+ "worker-classic",
+ "worker-module"
+ ],
+ "worker-module": [
+ "xhr",
+ "fetch",
+ "websocket",
+ "worker-classic",
+ "worker-module"
+ ],
+ "worker-classic-data": [
+ "xhr",
+ "fetch",
+ "websocket"
+ ],
+ "worker-module-data": [
+ "xhr",
+ "fetch",
+ "websocket"
+ ],
+ "sharedworker-classic": [
+ "xhr",
+ "fetch",
+ "websocket"
+ ],
+ "sharedworker-module": [
+ "xhr",
+ "fetch",
+ "websocket"
+ ],
+ "sharedworker-classic-data": [
+ "xhr",
+ "fetch",
+ "websocket"
+ ],
+ "sharedworker-module-data": [
+ "xhr",
+ "fetch",
+ "websocket"
+ ]
+ }
+ },
+ "source_context_list_schema": {
+ // Warning: Currently, some nested patterns of contexts have different
+ // inheritance rules for different kinds of policies.
+ // The generated tests will be used to test/investigate the policy
+ // inheritance rules, and eventually the policy inheritance rules will
+ // be unified (https://github.com/w3ctag/design-principles/issues/111).
+ "top": {
+ "description": "Policy set by the top-level Document",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "policy"
+ ]
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ },
+ "req": {
+ "description": "Subresource request's policy should override Document's policy",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "anotherPolicy"
+ ]
+ }
+ ],
+ "subresourcePolicyDeliveries": [
+ "nonNullPolicy"
+ ]
+ },
+ "srcdoc-inherit": {
+ "description": "srcdoc iframe without its own policy should inherit parent Document's policy",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "policy"
+ ]
+ },
+ {
+ "sourceContextType": "srcdoc"
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ },
+ "srcdoc": {
+ "description": "srcdoc iframe's policy should override parent Document's policy",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "anotherPolicy"
+ ]
+ },
+ {
+ "sourceContextType": "srcdoc",
+ "policyDeliveries": [
+ "nonNullPolicy"
+ ]
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ },
+ "iframe": {
+ "description": "external iframe's policy should override parent Document's policy",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "anotherPolicy"
+ ]
+ },
+ {
+ "sourceContextType": "iframe",
+ "policyDeliveries": [
+ "policy"
+ ]
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ },
+ "iframe-blank-inherit": {
+ "description": "blank iframe should inherit parent Document's policy",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "policy"
+ ]
+ },
+ {
+ "sourceContextType": "iframe-blank"
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ },
+ "worker-classic": {
+ // This is applicable to referrer-policy tests.
+ // Use "worker-classic-inherit" for CSP (mixed-content, etc.).
+ "description": "dedicated workers shouldn't inherit its parent's policy.",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "anotherPolicy"
+ ]
+ },
+ {
+ "sourceContextType": "worker-classic",
+ "policyDeliveries": [
+ "policy"
+ ]
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ },
+ "worker-classic-data": {
+ "description": "data: dedicated workers should inherit its parent's policy.",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "policy"
+ ]
+ },
+ {
+ "sourceContextType": "worker-classic-data",
+ "policyDeliveries": []
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ },
+ "worker-module": {
+ // This is applicable to referrer-policy tests.
+ "description": "dedicated workers shouldn't inherit its parent's policy.",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "anotherPolicy"
+ ]
+ },
+ {
+ "sourceContextType": "worker-module",
+ "policyDeliveries": [
+ "policy"
+ ]
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ },
+ "worker-module-data": {
+ "description": "data: dedicated workers should inherit its parent's policy.",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "policy"
+ ]
+ },
+ {
+ "sourceContextType": "worker-module-data",
+ "policyDeliveries": []
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ },
+ "sharedworker-classic": {
+ "description": "shared workers shouldn't inherit its parent's policy.",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "anotherPolicy"
+ ]
+ },
+ {
+ "sourceContextType": "sharedworker-classic",
+ "policyDeliveries": [
+ "policy"
+ ]
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ },
+ "sharedworker-classic-data": {
+ "description": "data: shared workers should inherit its parent's policy.",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "policy"
+ ]
+ },
+ {
+ "sourceContextType": "sharedworker-classic-data",
+ "policyDeliveries": []
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ },
+ "sharedworker-module": {
+ "description": "shared workers shouldn't inherit its parent's policy.",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "anotherPolicy"
+ ]
+ },
+ {
+ "sourceContextType": "sharedworker-module",
+ "policyDeliveries": [
+ "policy"
+ ]
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ },
+ "sharedworker-module-data": {
+ "description": "data: shared workers should inherit its parent's policy.",
+ "sourceContextList": [
+ {
+ "sourceContextType": "top",
+ "policyDeliveries": [
+ "policy"
+ ]
+ },
+ {
+ "sourceContextType": "sharedworker-module-data",
+ "policyDeliveries": []
+ }
+ ],
+ "subresourcePolicyDeliveries": []
+ }
+ },
+ "test_expansion_schema": {
+ "expansion": [
+ "default",
+ "override"
+ ],
+ "source_scheme": [
+ "http",
+ "https"
+ ],
+ "source_context_list": [
+ "top",
+ "req",
+ "srcdoc-inherit",
+ "srcdoc",
+ "iframe",
+ "iframe-blank-inherit",
+ "worker-classic",
+ "worker-classic-data",
+ "worker-module",
+ "worker-module-data",
+ "sharedworker-classic",
+ "sharedworker-classic-data",
+ "sharedworker-module",
+ "sharedworker-module-data"
+ ],
+ "redirection": [
+ "no-redirect",
+ "keep-origin",
+ "swap-origin",
+ "keep-scheme",
+ "swap-scheme",
+ "downgrade"
+ ],
+ "origin": [
+ "same-https",
+ "same-http",
+ "same-http-downgrade",
+ "cross-https",
+ "cross-http",
+ "cross-http-downgrade",
+ "same-wss",
+ "same-ws",
+ "same-ws-downgrade",
+ "cross-wss",
+ "cross-ws",
+ "cross-ws-downgrade"
+ ],
+ "subresource": [
+ "a-tag",
+ "area-tag",
+ "audio-tag",
+ "beacon",
+ "fetch",
+ "iframe-tag",
+ "img-tag",
+ "link-css-tag",
+ "link-prefetch-tag",
+ "object-tag",
+ "picture-tag",
+ "script-tag",
+ "script-tag-dynamic-import",
+ "sharedworker-classic",
+ "sharedworker-import",
+ "sharedworker-import-data",
+ "sharedworker-module",
+ "video-tag",
+ "websocket",
+ "worker-classic",
+ "worker-import",
+ "worker-import-data",
+ "worker-module",
+ "worklet-animation",
+ "worklet-animation-import-data",
+ "worklet-audio",
+ "worklet-audio-import-data",
+ "worklet-layout",
+ "worklet-layout-import-data",
+ "worklet-paint",
+ "worklet-paint-import-data",
+ "xhr"
+ ]
+ }
+}
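
The selection_pattern and test_file_path_pattern above are ordinary %-format strings whose keys are the artifact names from test_expansion_schema; generate.py fills them from a selection dict. A sketch with a hypothetical selection (the delivery_type/delivery_value entries are assumptions here, since they are defined by the per-suite spec.src.json files rather than this base file):

    selection = {
        'source_context_list': 'top',
        'delivery_type': 'http-rp',       # assumed, defined per suite
        'delivery_value': 'no-referrer',  # assumed, defined per suite
        'subresource': 'fetch',
        'origin': 'same-https',
        'redirection': 'no-redirect',
        'source_scheme': 'https',
    }
    selection_pattern = ('%(source_context_list)s.%(delivery_type)s/'
                         '%(delivery_value)s/%(subresource)s/'
                         '%(origin)s.%(redirection)s.%(source_scheme)s')
    print(selection_pattern % selection)
    # top.http-rp/no-referrer/fetch/same-https.no-redirect.https
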
diff --git a/testing/web-platform/tests/common/security-features/tools/spec_validator.py b/testing/web-platform/tests/common/security-features/tools/spec_validator.py
new file mode 100755
index 0000000000..3ac3f53016
--- /dev/null
+++ b/testing/web-platform/tests/common/security-features/tools/spec_validator.py
@@ -0,0 +1,253 @@
+#!/usr/bin/env python3
+
+from __future__ import print_function
+
+import json, sys
+
+
+def assert_non_empty_string(obj, field):
+ assert field in obj, 'Missing field "%s"' % field
+ assert isinstance(obj[field], str), \
+ 'Field "%s" must be a string' % field
+ assert len(obj[field]) > 0, 'Field "%s" must not be empty' % field
+
+
+def assert_non_empty_list(obj, field):
+ assert isinstance(obj[field], list), \
+ '%s must be a list' % field
+ assert len(obj[field]) > 0, \
+ '%s list must not be empty' % field
+
+
+def assert_non_empty_dict(obj, field):
+ assert isinstance(obj[field], dict), \
+ '%s must be a dict' % field
+ assert len(obj[field]) > 0, \
+ '%s dict must not be empty' % field
+
+
+def assert_contains(obj, field):
+ assert field in obj, 'Must contain field "%s"' % field
+
+
+def assert_value_from(obj, field, items):
+ assert obj[field] in items, \
+ 'Field "%s" must be from: %s' % (field, str(items))
+
+
+def assert_atom_or_list_items_from(obj, field, items):
+ if isinstance(obj[field], str) or isinstance(
+ obj[field], int) or obj[field] is None:
+ assert_value_from(obj, field, items)
+ return
+
+ assert isinstance(obj[field], list), '%s must be a list' % field
+ for allowed_value in obj[field]:
+ assert allowed_value != '*', "Wildcard is not supported for lists!"
+ assert allowed_value in items, \
+ 'Field "%s" must be from: %s' % (field, str(items))
+
+
+def assert_contains_only_fields(obj, expected_fields):
+ for expected_field in expected_fields:
+ assert_contains(obj, expected_field)
+
+ for actual_field in obj:
+ assert actual_field in expected_fields, \
+ 'Unexpected field "%s".' % actual_field
+
+
+def leaf_values(schema):
+ if isinstance(schema, list):
+ return schema
+ ret = []
+ for _, sub_schema in schema.items():
+ ret += leaf_values(sub_schema)
+ return ret
+
+
+def assert_value_unique_in(value, used_values):
+ assert value not in used_values, 'Duplicate value "%s"!' % str(value)
+ used_values[value] = True
+
+
+def assert_valid_artifact(exp_pattern, artifact_key, schema):
+ if isinstance(schema, list):
+ assert_atom_or_list_items_from(exp_pattern, artifact_key,
+ ["*"] + schema)
+ return
+
+ for sub_artifact_key, sub_schema in schema.items():
+ assert_valid_artifact(exp_pattern[artifact_key], sub_artifact_key,
+ sub_schema)
+
+
+def validate(spec_json, details):
+ """ Validates the json specification for generating tests. """
+
+ details['object'] = spec_json
+ assert_contains_only_fields(spec_json, [
+ "selection_pattern", "test_file_path_pattern",
+ "test_description_template", "test_page_title_template",
+ "specification", "delivery_key", "subresource_schema",
+ "source_context_schema", "source_context_list_schema",
+ "test_expansion_schema", "excluded_tests"
+ ])
+ assert_non_empty_list(spec_json, "specification")
+ assert_non_empty_dict(spec_json, "test_expansion_schema")
+ assert_non_empty_list(spec_json, "excluded_tests")
+
+ specification = spec_json['specification']
+ test_expansion_schema = spec_json['test_expansion_schema']
+ excluded_tests = spec_json['excluded_tests']
+
+ valid_test_expansion_fields = test_expansion_schema.keys()
+
+ # Should be consistent with `sourceContextMap` in
+ # `/common/security-features/resources/common.sub.js`.
+ valid_source_context_names = [
+ "top", "iframe", "iframe-blank", "srcdoc", "worker-classic",
+ "worker-module", "worker-classic-data", "worker-module-data",
+ "sharedworker-classic", "sharedworker-module",
+ "sharedworker-classic-data", "sharedworker-module-data"
+ ]
+
+ valid_subresource_names = [
+ "a-tag", "area-tag", "audio-tag", "form-tag", "iframe-tag", "img-tag",
+ "link-css-tag", "link-prefetch-tag", "object-tag", "picture-tag",
+ "script-tag", "script-tag-dynamic-import", "video-tag"
+ ] + ["beacon", "fetch", "xhr", "websocket"] + [
+ "worker-classic", "worker-module", "worker-import",
+ "worker-import-data", "sharedworker-classic", "sharedworker-module",
+ "sharedworker-import", "sharedworker-import-data",
+ "serviceworker-classic", "serviceworker-module",
+ "serviceworker-import", "serviceworker-import-data"
+ ] + [
+ "worklet-animation", "worklet-audio", "worklet-layout",
+ "worklet-paint", "worklet-animation-import", "worklet-audio-import",
+ "worklet-layout-import", "worklet-paint-import",
+ "worklet-animation-import-data", "worklet-audio-import-data",
+ "worklet-layout-import-data", "worklet-paint-import-data"
+ ]
+
+ # Validate each single spec.
+ for spec in specification:
+ details['object'] = spec
+
+ # Validate required fields for a single spec.
+ assert_contains_only_fields(spec, [
+ 'title', 'description', 'specification_url', 'test_expansion'
+ ])
+ assert_non_empty_string(spec, 'title')
+ assert_non_empty_string(spec, 'description')
+ assert_non_empty_string(spec, 'specification_url')
+ assert_non_empty_list(spec, 'test_expansion')
+
+ for spec_exp in spec['test_expansion']:
+ details['object'] = spec_exp
+ assert_contains_only_fields(spec_exp, valid_test_expansion_fields)
+
+ for artifact in test_expansion_schema:
+ details['test_expansion_field'] = artifact
+ assert_valid_artifact(spec_exp, artifact,
+ test_expansion_schema[artifact])
+ del details['test_expansion_field']
+
+ # Validate source_context_schema.
+ details['object'] = spec_json['source_context_schema']
+ assert_contains_only_fields(
+ spec_json['source_context_schema'],
+ ['supported_delivery_type', 'supported_subresource'])
+ assert_contains_only_fields(
+ spec_json['source_context_schema']['supported_delivery_type'],
+ valid_source_context_names)
+ for source_context in spec_json['source_context_schema'][
+ 'supported_delivery_type']:
+ assert_valid_artifact(
+ spec_json['source_context_schema']['supported_delivery_type'],
+ source_context, test_expansion_schema['delivery_type'])
+ assert_contains_only_fields(
+ spec_json['source_context_schema']['supported_subresource'],
+ valid_source_context_names)
+ for source_context in spec_json['source_context_schema'][
+ 'supported_subresource']:
+ assert_valid_artifact(
+ spec_json['source_context_schema']['supported_subresource'],
+ source_context, leaf_values(test_expansion_schema['subresource']))
+
+ # Validate subresource_schema.
+ details['object'] = spec_json['subresource_schema']
+ assert_contains_only_fields(spec_json['subresource_schema'],
+ ['supported_delivery_type'])
+ assert_contains_only_fields(
+ spec_json['subresource_schema']['supported_delivery_type'],
+ leaf_values(test_expansion_schema['subresource']))
+ for subresource in spec_json['subresource_schema'][
+ 'supported_delivery_type']:
+ assert_valid_artifact(
+ spec_json['subresource_schema']['supported_delivery_type'],
+ subresource, test_expansion_schema['delivery_type'])
+
+ # Validate the test_expansion schema members.
+ details['object'] = test_expansion_schema
+ assert_contains_only_fields(test_expansion_schema, [
+ 'expansion', 'source_scheme', 'source_context_list', 'delivery_type',
+ 'delivery_value', 'redirection', 'subresource', 'origin', 'expectation'
+ ])
+ assert_atom_or_list_items_from(test_expansion_schema, 'expansion',
+ ['default', 'override'])
+ assert_atom_or_list_items_from(test_expansion_schema, 'source_scheme',
+ ['http', 'https'])
+ assert_atom_or_list_items_from(
+ test_expansion_schema, 'source_context_list',
+ spec_json['source_context_list_schema'].keys())
+
+ # Should be consistent with `preprocess_redirection` in
+ # `/common/security-features/subresource/subresource.py`.
+ assert_atom_or_list_items_from(test_expansion_schema, 'redirection', [
+ 'no-redirect', 'keep-origin', 'swap-origin', 'keep-scheme',
+ 'swap-scheme', 'downgrade'
+ ])
+ for subresource in leaf_values(test_expansion_schema['subresource']):
+ assert subresource in valid_subresource_names, "Invalid subresource %s" % subresource
+ # Should be consistent with getSubresourceOrigin() in
+ # `/common/security-features/resources/common.sub.js`.
+ assert_atom_or_list_items_from(test_expansion_schema, 'origin', [
+ 'same-http', 'same-https', 'same-ws', 'same-wss', 'cross-http',
+ 'cross-https', 'cross-ws', 'cross-wss', 'same-http-downgrade',
+ 'cross-http-downgrade', 'same-ws-downgrade', 'cross-ws-downgrade'
+ ])
+
+ # Validate excluded tests.
+ details['object'] = excluded_tests
+ for excluded_test_expansion in excluded_tests:
+ assert_contains_only_fields(excluded_test_expansion,
+ valid_test_expansion_fields)
+ details['object'] = excluded_test_expansion
+ for artifact in test_expansion_schema:
+ details['test_expansion_field'] = artifact
+ assert_valid_artifact(excluded_test_expansion, artifact,
+ test_expansion_schema[artifact])
+ del details['test_expansion_field']
+
+ del details['object']
+
+
+def assert_valid_spec_json(spec_json):
+ error_details = {}
+ try:
+ validate(spec_json, error_details)
+ except AssertionError as err:
+ print('ERROR:', err)
+ print(json.dumps(error_details, indent=4))
+ sys.exit(1)
+
+
+def main():
+ spec_json = load_spec_json()
+ assert_valid_spec_json(spec_json)
+ print("Spec JSON is valid.")
+
+
+if __name__ == '__main__':
+ main()
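
The validator above is purely assert-based: each helper raises AssertionError with a message, and assert_valid_spec_json() turns that into an error dump plus a non-zero exit. A self-contained sketch of the same pattern, re-declaring one helper so the snippet runs on its own:

    import json

    def assert_non_empty_string(obj, field):
        assert field in obj, 'Missing field "%s"' % field
        assert isinstance(obj[field], str), 'Field "%s" must be a string' % field
        assert len(obj[field]) > 0, 'Field "%s" must not be empty' % field

    spec = {'title': ''}
    try:
        assert_non_empty_string(spec, 'title')
    except AssertionError as err:
        print('ERROR:', err)               # Field "title" must not be empty
        print(json.dumps(spec, indent=4))  # dump the offending object
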
diff --git a/testing/web-platform/tests/common/security-features/tools/template/disclaimer.template b/testing/web-platform/tests/common/security-features/tools/template/disclaimer.template
new file mode 100644
index 0000000000..ba9458cb31
--- /dev/null
+++ b/testing/web-platform/tests/common/security-features/tools/template/disclaimer.template
@@ -0,0 +1 @@
+<!-- DO NOT EDIT! Generated by `%(generating_script_filename)s --spec %(spec_directory)s/` -->
diff --git a/testing/web-platform/tests/common/security-features/tools/template/spec_json.js.template b/testing/web-platform/tests/common/security-features/tools/template/spec_json.js.template
new file mode 100644
index 0000000000..e4cbd03425
--- /dev/null
+++ b/testing/web-platform/tests/common/security-features/tools/template/spec_json.js.template
@@ -0,0 +1 @@
+var SPEC_JSON = %(spec_json)s;
diff --git a/testing/web-platform/tests/common/security-features/tools/template/test.debug.html.template b/testing/web-platform/tests/common/security-features/tools/template/test.debug.html.template
new file mode 100644
index 0000000000..b6be088f61
--- /dev/null
+++ b/testing/web-platform/tests/common/security-features/tools/template/test.debug.html.template
@@ -0,0 +1,26 @@
+<!DOCTYPE html>
+%(generated_disclaimer)s
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="timeout" content="long">%(meta_delivery_method)s
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="/common/security-features/resources/common.sub.js"></script>
+ <!-- The original specification JSON for validating the scenario. -->
+ <script src="%(spec_json_js)s"></script>
+ <!-- Internal checking of the tests -->
+ <script src="%(sanity_checker_js)s"></script>
+%(helper_js)s </head>
+ <body>
+ <script>
+ TestCase(
+ [
+ %(scenarios)s
+ ],
+ new SanityChecker()
+ ).start();
+ </script>
+ <div id="log"></div>
+ </body>
+</html>
diff --git a/testing/web-platform/tests/common/security-features/tools/template/test.release.html.template b/testing/web-platform/tests/common/security-features/tools/template/test.release.html.template
new file mode 100644
index 0000000000..bac2d5b5a4
--- /dev/null
+++ b/testing/web-platform/tests/common/security-features/tools/template/test.release.html.template
@@ -0,0 +1,22 @@
+<!DOCTYPE html>
+%(generated_disclaimer)s
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="timeout" content="long">%(meta_delivery_method)s
+ <script src="/resources/testharness.js"></script>
+ <script src="/resources/testharnessreport.js"></script>
+ <script src="/common/security-features/resources/common.sub.js"></script>
+%(helper_js)s </head>
+ <body>
+ <script>
+ TestCase(
+ [
+ %(scenarios)s
+ ],
+ new SanityChecker()
+ ).start();
+ </script>
+ <div id="log"></div>
+ </body>
+</html>
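
In both templates, %(scenarios)s receives a single pre-formatted string: generate_test_file() serializes each scenario as sorted JSON, joins them with ",\n", and re-indents continuation lines by ten spaces so they line up inside the TestCase([...]) call. A standalone sketch of that join-and-indent step with two toy scenarios:

    import json

    scenarios = [{'subresource': 'img-tag'}, {'subresource': 'fetch'}]
    serialized = sorted(
        json.dumps(s, indent=2, separators=(',', ': '), sort_keys=True)
        for s in scenarios)
    # Ten spaces of indentation, as in generate_test_file() above.
    embedded = ',\n'.join(serialized).replace('\n', '\n' + ' ' * 10)
    print(embedded)
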
diff --git a/testing/web-platform/tests/common/security-features/tools/util.py b/testing/web-platform/tests/common/security-features/tools/util.py
new file mode 100644
index 0000000000..72541c7814
--- /dev/null
+++ b/testing/web-platform/tests/common/security-features/tools/util.py
@@ -0,0 +1,230 @@
+from __future__ import print_function
+
+import os, sys, json, json5, re
+import collections
+
+script_directory = os.path.dirname(os.path.abspath(__file__))
+template_directory = os.path.abspath(
+ os.path.join(script_directory, 'template'))
+test_root_directory = os.path.abspath(
+ os.path.join(script_directory, '..', '..', '..'))
+
+
+def get_template(basename):
+ with open(os.path.join(template_directory, basename), "r") as f:
+ return f.read()
+
+
+def write_file(filename, contents):
+ with open(filename, "w") as f:
+ f.write(contents)
+
+
+def read_nth_line(fp, line_number):
+ fp.seek(0)
+ for i, line in enumerate(fp):
+ if (i + 1) == line_number:
+ return line
+
+
+def load_spec_json(path_to_spec):
+ re_error_location = re.compile('line ([0-9]+) column ([0-9]+)')
+ with open(path_to_spec, "r") as f:
+ try:
+ return json5.load(f, object_pairs_hook=collections.OrderedDict)
+ except ValueError as ex:
+ print(ex)
+ match = re_error_location.search(str(ex))
+ if match:
+ line_number, column = int(match.group(1)), int(match.group(2))
+ print(read_nth_line(f, line_number).rstrip())
+ print(" " * (column - 1) + "^")
+ sys.exit(1)
+
+
+class ShouldSkip(Exception):
+ '''
+ Raised when the given combination of subresource type, source context type,
+ delivery type etc. are not supported and we should skip that configuration.
+ ShouldSkip is expected during normal generator execution (and thus
+ subsequent generation continues), as we first enumerate a broad range of
+ configurations and later raise ShouldSkip to filter out unsupported ones.
+
+ ShouldSkip is distinguished from other general errors that cause immediate
+ termination of the generator and require a fix.
+ '''
+ def __init__(self):
+ pass
+
+
+class PolicyDelivery(object):
+ '''
+ See `@typedef PolicyDelivery` comments in
+ `common/security-features/resources/common.sub.js`.
+ '''
+
+ def __init__(self, delivery_type, key, value):
+ self.delivery_type = delivery_type
+ self.key = key
+ self.value = value
+
+ def __eq__(self, other):
+ return type(self) is type(other) and self.__dict__ == other.__dict__
+
+ @classmethod
+ def list_from_json(cls, list, target_policy_delivery,
+ supported_delivery_types):
+ # type: (dict, PolicyDelivery, typing.List[str]) -> typing.List[PolicyDelivery]
+ '''
+ Parses a JSON object `list` that represents a list of `PolicyDelivery`
+ and returns a list of `PolicyDelivery`. Placeholder strings are also
+ accepted (see `from_json()` comments below or
+ `common/security-features/README.md`).
+
+ Can raise `ShouldSkip`.
+ '''
+ if list is None:
+ return []
+
+ out = []
+ for obj in list:
+ policy_delivery = PolicyDelivery.from_json(
+ obj, target_policy_delivery, supported_delivery_types)
+ # Drop entries with null values.
+ if policy_delivery.value is None:
+ continue
+ out.append(policy_delivery)
+ return out
+
+ @classmethod
+ def from_json(cls, obj, target_policy_delivery, supported_delivery_types):
+ # type: (dict, PolicyDelivery, typing.List[str]) -> PolicyDelivery
+ '''
+ Parses a JSON object `obj` and returns a `PolicyDelivery` object.
+ In addition to dicts (in the same format as to_json() outputs),
+ this method accepts the following placeholders:
+ "policy":
+ `target_policy_delivery`
+ "policyIfNonNull":
+ `target_policy_delivery` if its value is not None.
+ "anotherPolicy":
+ A PolicyDelivery that has the same key as
+ `target_policy_delivery` but a different value.
+ The delivery type is selected from `supported_delivery_types`.
+
+ Can raise `ShouldSkip`.
+ '''
+
+ if obj == "policy":
+ policy_delivery = target_policy_delivery
+ elif obj == "nonNullPolicy":
+ if target_policy_delivery.value is None:
+ raise ShouldSkip()
+ policy_delivery = target_policy_delivery
+ elif obj == "anotherPolicy":
+ if len(supported_delivery_types) == 0:
+ raise ShouldSkip()
+ policy_delivery = target_policy_delivery.get_another_policy(
+ supported_delivery_types[0])
+ elif isinstance(obj, dict):
+ policy_delivery = PolicyDelivery(obj['deliveryType'], obj['key'],
+ obj['value'])
+ else:
+ raise Exception('policy delivery is invalid: ' + obj)
+
+ # Omit unsupported combinations of source contexts and delivery type.
+ if policy_delivery.delivery_type not in supported_delivery_types:
+ raise ShouldSkip()
+
+ return policy_delivery
+
+ def to_json(self):
+ # type: () -> dict
+ return {
+ "deliveryType": self.delivery_type,
+ "key": self.key,
+ "value": self.value
+ }
+
+ def get_another_policy(self, delivery_type):
+ # type: (str) -> PolicyDelivery
+ if self.key == 'referrerPolicy':
+ # Return 'unsafe-url' (i.e. a policy less safe than `self.value`)
+ # whenever possible, to make sure the tests fail if the
+ # returned policy is unexpectedly used instead of `self.value`.
+ # Using a safer policy wouldn't be distinguishable from acceptable,
+ # arbitrary policy enforcement by user agents, as specified at
+ # Step 7 of
+ # https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer:
+ # "The user agent MAY alter referrerURL or referrerOrigin at this
+ # point to enforce arbitrary policy considerations in the
+ # interests of minimizing data leakage."
+ # See also the comments at `referrerUrlResolver` in
+ # `wpt/referrer-policy/generic/test-case.sub.js`.
+ if self.value != 'unsafe-url':
+ return PolicyDelivery(delivery_type, self.key, 'unsafe-url')
+ else:
+ return PolicyDelivery(delivery_type, self.key, 'no-referrer')
+ elif self.key == 'mixedContent':
+ if self.value == 'opt-in':
+ return PolicyDelivery(delivery_type, self.key, None)
+ else:
+ return PolicyDelivery(delivery_type, self.key, 'opt-in')
+ elif self.key == 'contentSecurityPolicy':
+ if self.value is not None:
+ return PolicyDelivery(delivery_type, self.key, None)
+ else:
+ return PolicyDelivery(delivery_type, self.key, 'worker-src-none')
+ elif self.key == 'upgradeInsecureRequests':
+ if self.value == 'upgrade':
+ return PolicyDelivery(delivery_type, self.key, None)
+ else:
+ return PolicyDelivery(delivery_type, self.key, 'upgrade')
+ else:
+ raise Exception('delivery key is invalid: ' + self.key)
+
+
+class SourceContext(object):
+ def __init__(self, source_context_type, policy_deliveries):
+ # type: (unicode, typing.List[PolicyDelivery]) -> None
+ self.source_context_type = source_context_type
+ self.policy_deliveries = policy_deliveries
+
+ def __eq__(self, other):
+ return type(self) is type(other) and self.__dict__ == other.__dict__
+
+ @classmethod
+ def from_json(cls, obj, target_policy_delivery, source_context_schema):
+ '''
+ Parses a JSON object `obj` and returns a `SourceContext` object.
+
+ `target_policy_delivery` and `source_context_schema` are used for
+ policy delivery placeholders and filtering out unsupported
+ delivery types.
+
+ Can raise `ShouldSkip`.
+ '''
+ source_context_type = obj.get('sourceContextType')
+ policy_deliveries = PolicyDelivery.list_from_json(
+ obj.get('policyDeliveries'), target_policy_delivery,
+ source_context_schema['supported_delivery_type']
+ [source_context_type])
+ return SourceContext(source_context_type, policy_deliveries)
+
+ def to_json(self):
+ return {
+ "sourceContextType": self.source_context_type,
+ "policyDeliveries": [x.to_json() for x in self.policy_deliveries]
+ }
+
+
+class CustomEncoder(json.JSONEncoder):
+ '''
+ Used to dump dicts containing `SourceContext`/`PolicyDelivery` into JSON.
+ '''
+ def default(self, obj):
+ if isinstance(obj, SourceContext):
+ return obj.to_json()
+ if isinstance(obj, PolicyDelivery):
+ return obj.to_json()
+ return json.JSONEncoder.default(self, obj)
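
Since scenario dicts contain SourceContext and PolicyDelivery objects, they are serialized through CustomEncoder. A small usage sketch, assuming this module is importable as util:

    import json
    import util

    delivery = util.PolicyDelivery('http-rp', 'referrerPolicy', 'no-referrer')
    context = util.SourceContext('iframe', [delivery])

    # CustomEncoder delegates to the objects' to_json() methods.
    print(json.dumps({'source_context_list': [context]},
                     indent=2, cls=util.CustomEncoder))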